text
stringlengths 3
1.05M
|
|---|
import sqlalchemy as sa
from sqlservice import event
from .fixtures import Model, parametrize
class EventModel(Model):
    """Model declaring one listener per sqlservice ``@event.*`` decorator.

    The parametrized test below verifies that each decorator actually
    registers its method with SQLAlchemy's event system.
    """

    __tablename__ = "test_events"

    id = sa.Column(sa.types.Integer(), primary_key=True)

    # --- attribute events: listeners target the instrumented "id" column ---
    # FIX: the third positional argument was misspelled "initator"; renamed
    # to "initiator" (SQLAlchemy passes it positionally, so behavior is
    # unchanged, but the name now matches SQLAlchemy's documented signature).
    @event.on_set("id")
    def on_set(self, value, oldvalue, initiator):
        pass

    @event.on_append("id")
    def on_append(self, value, oldvalue, initiator):
        pass

    @event.on_bulk_replace("id")
    def on_bulk_replace(self, value, oldvalue, initiator):
        pass

    @event.on_remove("id")
    def on_remove(self, value, oldvalue, initiator):
        pass

    @event.on_init_scalar("id")
    def on_init_scalar(self, value, dict_):
        pass

    @event.on_init_collection("id")
    def on_init_collection(self, collection, collection_adapter):
        pass

    @event.on_dispose_collection("id")
    def on_dispose_collection(self, collection, collection_adapter):
        pass

    @event.on_modified("id")
    def on_modified(self, initiator):
        pass

    # --- mapper events: listeners receive (mapper, connection, instance);
    # the instance slot is deliberately named "self" here ---
    @event.before_delete()
    def before_delete(mapper, connection, self):
        pass

    @event.before_insert()
    def before_insert(mapper, connection, self):
        pass

    @event.before_update()
    def before_update(mapper, connection, self):
        pass

    @event.before_save()
    def before_save(mapper, connection, self):
        pass

    @event.after_delete()
    def after_delete(mapper, connection, self):
        pass

    @event.after_insert()
    def after_insert(mapper, connection, self):
        pass

    @event.after_update()
    def after_update(mapper, connection, self):
        pass

    @event.after_save()
    def after_save(mapper, connection, self):
        pass

    # --- instance lifecycle events ---
    @event.on_expire()
    def on_expire(self, attrs):
        pass

    @event.on_load()
    def on_load(self, context):
        pass

    @event.on_refresh()
    def on_refresh(self, context, attrs):
        pass
@parametrize(
    "target,event,listener",
    [
        # Attribute-level events register against the instrumented column.
        (EventModel.id, "set", EventModel.on_set),
        (EventModel.id, "append", EventModel.on_append),
        (EventModel.id, "remove", EventModel.on_remove),
        (EventModel.id, "init_scalar", EventModel.on_init_scalar),
        (EventModel.id, "init_collection", EventModel.on_init_collection),
        (EventModel.id, "dispose_collection", EventModel.on_dispose_collection),
        (EventModel.id, "modified", EventModel.on_modified),
        (EventModel.id, "bulk_replace", EventModel.on_bulk_replace),
        # Mapper-level events register against the model class itself.
        (EventModel, "before_delete", EventModel.before_delete),
        (EventModel, "before_insert", EventModel.before_insert),
        (EventModel, "before_update", EventModel.before_update),
        # before_save/after_save each register ONE listener on BOTH the
        # insert and update events — hence the duplicated rows.
        (EventModel, "before_insert", EventModel.before_save),
        (EventModel, "before_update", EventModel.before_save),
        (EventModel, "after_delete", EventModel.after_delete),
        (EventModel, "after_insert", EventModel.after_insert),
        (EventModel, "after_update", EventModel.after_update),
        (EventModel, "after_insert", EventModel.after_save),
        (EventModel, "after_update", EventModel.after_save),
        (EventModel, "expire", EventModel.on_expire),
        (EventModel, "load", EventModel.on_load),
        (EventModel, "refresh", EventModel.on_refresh),
    ],
)
def test_events(target, event, listener):
    """Test that event listeners are properly registered."""
    assert sa.event.contains(target, event, listener)
|
#!/usr/bin/env python
# Copyright 2018 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_software
short_description: Install specific release of PAN-OS.
description:
- Install specific release of PAN-OS.
author: "Michael Richardson (@mrichardson03)"
version_added: "2.6"
requirements:
- pan-python can be obtained from PyPi U(https://pypi.python.org/pypi/pan-python)
- pandevice can be obtained from PyPi U(https://pypi.python.org/pypi/pandevice)
notes:
- Checkmode is not supported.
- Panorama is supported.
options:
ip_address:
description:
- IP address or hostname of PAN-OS device.
required: true
username:
description:
- Username for authentication for PAN-OS device. Optional if I(api_key) is used.
default: 'admin'
password:
description:
- Password for authentication for PAN-OS device. Optional if I(api_key) is used.
api_key:
description:
- API key to be used instead of I(username) and I(password).
version:
description:
- Desired PAN-OS release.
required: true
restart:
description:
- Restart device after installing desired version. Use in conjunction with
panos_check to determine when firewall is ready again.
default: false
'''
EXAMPLES = '''
- name: Install PAN-OS 7.1.16 and restart
panos_software:
ip_address: '{{ fw_ip_address }}'
username: '{{ fw_username }}'
password: '{{ fw_password }}'
version: '7.1.16'
restart: true
'''
RETURN = '''
version:
description: After performing the software install, returns the version installed on the
device.
'''
from ansible.module_utils.basic import AnsibleModule
# Import guard: pandevice is optional at import time so main() can report a
# clean Ansible failure (fail_json) instead of crashing with ImportError.
try:
    from pandevice import PanOSVersion
    from pandevice.errors import PanDeviceError
    from pandevice import base

    HAS_LIB = True
except ImportError:
    HAS_LIB = False
def main():
    """Ansible entry point: install the requested PAN-OS release on the device.

    Exits via module.exit_json with `changed`/`version`, or module.fail_json
    on missing libraries or any PanDeviceError.
    """
    argument_spec = dict(
        ip_address=dict(required=True),
        username=dict(default='admin'),
        password=dict(no_log=True),
        api_key=dict(no_log=True),
        version=dict(type='str', required=True),
        restart=dict(type='bool', default=False)
    )

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)

    if not HAS_LIB:
        module.fail_json(msg='pan-python and pandevice are required for this module.')

    ip_address = module.params['ip_address']
    username = module.params['username']
    password = module.params['password']
    api_key = module.params['api_key']
    version = module.params['version']
    restart = module.params['restart']

    changed = False

    try:
        device = base.PanDevice.create_from_device(ip_address, username, password, api_key=api_key)

        # Refresh the device's view of available software releases.
        device.software.check()

        # Only act when the device is not already on the requested release.
        if PanOSVersion(version) != PanOSVersion(device.version):
            # Method only performs install if sync is set to true.
            device.software.download_install(version, sync=True)

            if restart:
                device.restart()

            changed = True

    except PanDeviceError as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
        # old `e.message` raised AttributeError instead of reporting the
        # real failure. str(e) works on both Python 2 and 3.
        module.fail_json(msg=str(e))

    module.exit_json(changed=changed, version=version)


if __name__ == '__main__':
    main()
|
#-----------------------------------------------------------------------------
# Runtime: 100ms
# Memory Usage:
# Link:
#-----------------------------------------------------------------------------
class Solution:
    def convert(self, s, numRows):
        """Return *s* written in a zigzag over *numRows* rows, read row by row.

        :type s: str
        :type numRows: int
        :rtype: str
        """
        # One row (or trivially short input) is already its own answer.
        if numRows <= 1 or len(s) <= 1:
            return s
        # Length of one full zigzag period: down numRows, then up numRows-2.
        cycle = 2 * numRows - 2
        # PERF FIX: collect characters in a list and join once instead of
        # repeated string += (which is quadratic in the worst case).
        chars = []
        for row in range(numRows):
            for j in range(row, len(s), cycle):
                chars.append(s[j])
                # Middle rows contribute a second, "diagonal" character per
                # cycle at offset cycle - 2*row; edge rows do not.
                if row != 0 and row != numRows - 1 and j + cycle - row * 2 < len(s):
                    chars.append(s[j + cycle - row * 2])
        return "".join(chars)
|
# the script that will be fed to sbatch
# note: the placeholders {arrayTaskIds} and {script} will be replaced automatically
batchScript = '''#!/bin/bash
#SBATCH --time=4:00:00
#SBATCH --mem=8G
##SBATCH --partition=batch
#SBATCH --partition=short
##SBATCH --partition=gpushort
##SBATCH --gres=gpu:teslak80:1
##SBATCH --gres=gpu:1
#SBATCH --constraint=[hsw|ivb]
#SBATCH --cpus-per-task=1
#SBATCH --output=log/slurm-%A_%a.out
#SBATCH --error=log/slurm-%A_%a.err
#SBATCH --array={arrayTaskIds}
#export CUDA_VISIBLE_DEVICES=""
THEANO_BASE_COMPILEDIR=$TMPDIR/$USER/theano/$SLURM_ARRAY_JOB_ID/$SLURM_ARRAY_TASK_ID
export THEANO_FLAGS=device=gpu,floatX=float32,base_compiledir=$THEANO_BASE_COMPILEDIR
function clean_up {
echo "Cleaning up and removing "$THEANO_BASE_COMPILEDIR
rm -rf $THEANO_BASE_COMPILEDIR
exit
}
trap clean_up SIGINT SIGTERM
#source $PYENVDIR/bin/activate
echo "hostname =" $(hostname)
echo "job id =" $SLURM_ARRAY_JOB_ID
echo "task id =" $SLURM_ARRAY_TASK_ID
srun --mpi=pmi2 python {script} $SLURM_ARRAY_TASK_ID
#deactivate
clean_up
'''
# these will be given to srun when it is called independently (not as a part of sbatch)
independent_srun_args = [
"--time=04:00:00",
"--mem=8G",
"--partition=short",
"--constraint=[hsw|ivb]",
"--mpi=pmi2",
]
## parameters to srun
## note: "--array" parameter will be added automatically
#sbatchParams = '''
##SBATCH --time=04:00:00
##SBATCH --mem=8G
##SBATCH --partition=gpushort
##SBATCH --gres=gpu:teslak80:1
##SBATCH -o generate.out
##SBATCH -e generate.err
#'''
#
## will be added to the generated sbatch file before "srun"
#sbatchBeforeSrun = '''
##export CUDA_VISIBLE_DEVICES=""
#THEANO_BASE_COMPILEDIR=$TMPDIR/$USER/theano/keras-test
#export THEANO_FLAGS=device=gpu,floatX=float32,base_compiledir=$THEANO_BASE_COMPILEDIR
#
#echo "hostname =" $(hostname)
#
#source $PYENVDIR/bin/activate
#'''
#
#sbatchSrun = '''
#export KERAS_BACKEND=tensorflow
#srun python {script} $SLURM_ARRAY_TASK_ID
##export KERAS_BACKEND=theano
##srun --mpi=pmi2 python {script} $SLURM_ARRAY_TASK_ID
#'''
#
## will be added to the generated sbatch file after "srun"
#sbatchAfterSrun = '''
#deactivate
#'''
|
from transmute_core.exceptions import APIException, SerializationException
def test_serialization_error_is_api_exception():
    """SerializationException must subclass APIException so that the api's
    default exception handling also covers serialization failures."""
    error = SerializationException("")
    assert isinstance(error, APIException)
|
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <fcntl.h>
#include "uik/uik.h"
/*
 * Entry point: create a 600x400 toolkit window at (50,50) and run the UI
 * main loop until it exits.
 *
 * FIXES:
 *  - `void main()` is non-standard C; hosted programs must declare
 *    `int main` and return a status.
 *  - A large block of commented-out socket/buffer experimentation (connect,
 *    create_buffer, commit_buffer, sleeps) was dead code and has been removed.
 */
int main(void) {
    uik_window_new("win1", 600, 400, 50, 50);
    /* Propagate the main loop's result as the process exit status. */
    return uik_mainloop();
}
|
/**
* Note that this script is intended to be included at the *end* of the document, before </body>
*/
(function (window, document) {
if ('open' in document.createElement('details')) return;
// made global by myself to be reused elsewhere
// Cross-browser event binding helper. The outer IIFE feature-detects ONCE
// (W3C addEventListener vs legacy IE attachEvent) and returns the matching
// binder; both variants also accept a NodeList/array and recurse over it.
var addEvent = (function () {
    if (document.addEventListener) {
        return function (el, type, fn) {
            // Single node (has nodeName) or the window object itself.
            if (el && el.nodeName || el === window) {
                el.addEventListener(type, fn, false);
            } else if (el && el.length) {
                // Collection: bind each member.
                for (var i = 0; i < el.length; i++) {
                    addEvent(el[i], type, fn);
                }
            }
        };
    } else {
        return function (el, type, fn) {
            if (el && el.nodeName || el === window) {
                // Wrap the handler so `this` is the element and it receives
                // IE's global window.event object.
                el.attachEvent('on' + type, function () { return fn.call(el, window.event); });
            } else if (el && el.length) {
                for (var i = 0; i < el.length; i++) {
                    addEvent(el[i], type, fn);
                }
            }
        };
    }
})();
/** details support - typically in it's own script */
// find the first /real/ node
// Return the first non-#text child of `source`, or null when every child
// is a text node (or there are no siblings left to inspect).
function firstNode(source) {
    var candidate = source.firstChild;
    if (candidate.nodeName != "#text") {
        return candidate;
    }
    // First child is text: walk forward until something else (or nothing).
    do {
        candidate = candidate.nextSibling;
    } while (candidate && candidate.nodeName == '#text');
    return candidate || null;
}
// True when `el` is inside (or is) a SUMMARY element; the walk stops as
// soon as the enclosing DETAILS is reached.
function isSummary(el) {
    var name = el.nodeName.toUpperCase();
    if (name == 'SUMMARY') {
        return true;
    }
    if (name == 'DETAILS') {
        return false;
    }
    return isSummary(el.parentNode);
}
// Click/keypress handler bound to each <details>; `this` is the details
// element. Toggles the `open` attribute so the CSS injected by addStyle()
// shows or hides the content.
function toggleDetails(event) {
    // more sigh - need to check the clicked object
    var keypress = event.type == 'keypress',
        target = event.target || event.srcElement; // srcElement: legacy IE
    // Only react when the event originated in the summary (direct child or
    // grandchild of this details element).
    if (((target.parentNode == this) || (target.parentNode.parentNode == this)) && (keypress || isSummary(target))) {
        if (keypress) {
            // if it's a keypress, make sure it was enter or space
            keypress = event.which || event.keyCode;
            if (keypress == 32 || keypress == 13) {
                // all's good, go ahead and toggle
            } else {
                return;
            }
        }
        var open = this.getAttribute('open');
        if (open === null) {
            this.setAttribute('open', 'open');
        } else {
            this.removeAttribute('open');
        }
        // this.className = open ? 'open' : ''; // Lame
        // trigger reflow (required in IE - sometimes in Safari too)
        setTimeout(function () {
            document.body.className = document.body.className;
        }, 13);
        // Swallow enter/space so the page doesn't scroll or submit.
        if (keypress) {
            event.preventDefault && event.preventDefault();
            return false;
        }
    }
}
// Inject the stylesheet that emulates native <details> show/hide behaviour.
// Inserted BEFORE any existing <head> styles so page CSS can still override it.
function addStyle() {
    var style = document.createElement('style'),
        head = document.getElementsByTagName('head')[0],
        // Old IE uses innerText on <style>; standards browsers use textContent.
        key = style.innerText === undefined ? 'textContent' : 'innerText';
    var rules = ['details{display: block;}','details > *{display: none;}','details.open > *{display: block;}','details[open] > *{display: block;}','details > summary:first-child{display: block;cursor: pointer;}','details[open]{display: block;}'];
    // FIX: removed the stray statement `i = rules.length;` — it assigned to
    // the IIFE-level loop variable `i` for no purpose (dead code left over
    // from an earlier loop-based implementation).
    style[key] = rules.join("\n");
    head.insertBefore(style, head.firstChild);
}
// --- polyfill bootstrap: runs once at script load ---
var details = document.getElementsByTagName('details'),
    wrapper,
    i = details.length,
    j,
    first = null,
    label = document.createElement('summary');

// NOTE(review): `label` itself is never inserted below — a fresh <summary>
// is built each time because cloned nodes missed styles in IE (see comment
// in the loop); `label` appears to be vestigial.
label.appendChild(document.createTextNode('Details'));

while (i--) {
    first = firstNode(details[i]);
    if (first != null && first.nodeName.toUpperCase() == 'SUMMARY') {
        // we've found that there's a details label already
    } else {
        // No <summary> present: synthesise a default "Details" label and
        // prepend it to this details element.
        // first = label.cloneNode(true); // cloned nodes weren't picking up styles in IE - random
        first = document.createElement('summary');
        first.appendChild(document.createTextNode('Details'));
        if (details[i].firstChild) {
            details[i].insertBefore(first, details[i].firstChild);
        } else {
            details[i].appendChild(first);
        }
    }
    // this feels *really* nasty, but we can't target details :text in css :(
    // Wrap non-whitespace bare text nodes in <text> elements so the injected
    // CSS child selectors can hide/show them with the rest of the content.
    j = details[i].childNodes.length;
    while (j--) {
        if (details[i].childNodes[j].nodeName === '#text' && (details[i].childNodes[j].nodeValue||'').replace(/\s/g, '').length) {
            wrapper = document.createElement('text');
            wrapper.appendChild(details[i].childNodes[j]);
            details[i].insertBefore(wrapper, details[i].childNodes[j]);
        }
    }
    first.legend = true;
    first.tabIndex = 0; // make the summary keyboard-focusable
}

// trigger details in case this being used on it's own
document.createElement('details');

// Delegate toggling to every details element found above.
addEvent(details, 'click', toggleDetails);
addEvent(details, 'keypress', toggleDetails);
addStyle();
})(window, document);
|
import cleanBasicHtml from '@tryghost/kg-clean-basic-html';
/* global DOMParser, window */
function createParserPlugins(_options = {}) {
const defaults = {};
const options = Object.assign({}, defaults, _options);
if (!options.createDocument) {
const Parser = typeof DOMParser !== 'undefined' && DOMParser || typeof window !== 'undefined' && window.DOMParser;
if (!Parser) {
throw new Error('createParserPlugins() must be passed a `createDocument` function as an option when used in a non-browser environment');
}
options.createDocument = function (html) {
const parser = new Parser();
return parser.parseFromString(html, 'text/html');
};
} // HELPERS -----------------------------------------------------------------
// Pull a <figcaption> out of `node`, sanitise its HTML, and fold it into
// payload.caption; the element is removed so it isn't parsed again.
function _readFigCaptionFromNode(node, payload) {
    const captionEl = node.querySelector('figcaption');
    if (!captionEl) {
        return;
    }
    const cleaned = cleanBasicHtml(captionEl.innerHTML, options);
    // Captions collected from multiple nodes are joined with " / ".
    payload.caption = payload.caption ? `${payload.caption} / ${cleaned}` : cleaned;
    captionEl.remove(); // cleanup this processed element
}
// Build one gallery payload entry from an <img> node. Gallery rows hold
// three images, hence row = floor(imgNum / 3).
function _readGalleryImageFromNode(node, imgNum) {
    const image = {
        fileName: node.src.match(/[^/]*$/)[0],
        row: Math.floor(imgNum / 3),
        src: node.src
    };
    // Prefer the live width/height properties; fall back to data-* attributes.
    if (node.width) {
        image.width = node.width;
    } else if (node.dataset && node.dataset.width) {
        image.width = parseInt(node.dataset.width, 10);
    }
    if (node.height) {
        image.height = node.height;
    } else if (node.dataset && node.dataset.height) {
        image.height = parseInt(node.dataset.height, 10);
    }
    // alt/title only appear in the payload when they are truthy.
    ['alt', 'title'].forEach(function (attr) {
        if (node[attr]) {
            image[attr] = node[attr];
        }
    });
    return image;
}
// Convert Medium "mixtape" embeds (div.graf--mixtapeEmbed) into bookmark
// cards carrying url/title/description/thumbnail metadata.
function mixtapeEmbed(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'DIV' || !node.className.match(/graf--mixtapeEmbed/)) {
        return;
    }
    // NOTE(review): querySelector can return null; `.href` would then throw.
    // Presumably the Medium export always includes the anchor — confirm.
    let anchor = node.querySelector('.markup--mixtapeEmbed-anchor').href;
    // NOTE(review): title/desc are DOM elements (not their text content)
    // and are stored as-is in the metadata below — verify consumers expect that.
    let title = node.querySelector('.markup--mixtapeEmbed-strong');
    let desc = node.querySelector('.markup--mixtapeEmbed-em');
    let img = node.querySelector('.mixtapeImage');
    let imgSrc = false; // Image is optional,
    // The element usually still exists with an additional has.mixtapeImage--empty class and has no background image
    if (img && img.style['background-image']) {
        imgSrc = img.style['background-image'].match(/url\(([^)]*?)\)/)[1];
    } // Format our preferred structure.

    let metadata = {
        url: anchor,
        title: title,
        description: desc,
        thumbnail: imgSrc
    };
    let payload = {
        metadata,
        url: metadata.url,
        type: 'bookmark'
    };
    let cardSection = builder.createCardSection('bookmark', payload);
    addSection(cardSection);
    nodeFinished();
}
// allows arbitrary HTML blocks wrapped in our card comments to be extracted
// into a HTML card rather than being put through the normal parse+plugins
// Extract raw HTML wrapped in <!--kg-card-begin: html--> … <!--kg-card-end: html-->
// comment markers into an html card, bypassing the normal parser plugins.
function kgHtmlCardToCard(node, builder, {
    addSection,
    nodeFinished
}) {
    // Only trigger on the opening marker comment (nodeType 8 == comment).
    if (node.nodeType !== 8 || node.nodeValue !== 'kg-card-begin: html') {
        return;
    }
    let html = [];

    function isHtmlEndComment(node) {
        return node && node.nodeType === 8 && node.nodeValue === 'kg-card-end: html';
    }

    let nextNode = node.nextSibling;

    // Collect every sibling up to (not including) the end marker.
    while (nextNode && !isHtmlEndComment(nextNode)) {
        let currentNode = nextNode;
        html.push(currentNode.outerHTML);
        nextNode = currentNode.nextSibling; // remove nodes as we go so that they don't go through the parser

        currentNode.remove();
    }
    let payload = {
        html: html.join('\n').trim()
    };
    let cardSection = builder.createCardSection('html', payload);
    addSection(cardSection);
    nodeFinished();
}
// mobiledoc drops <BR> tags by default; replace each with the custom
// "soft-return" atom instead.
function brToSoftBreakAtom(node, builder, {
    addMarkerable,
    nodeFinished
}) {
    const isBreak = node.nodeType === 1 && node.tagName === 'BR';
    if (!isBreak) {
        return;
    }
    addMarkerable(builder.createAtom('soft-return'));
    nodeFinished();
}
// which doesn't render correctly if we're replacing <br> with SoftReturn atoms
// after parsing text as markdown to html
// Strip a single leading "\n" from text nodes; after markdown->html parsing
// a leading newline renders as a stray space once <br> handling is replaced
// by SoftReturn atoms.
function removeLeadingNewline(node) {
    const isTextNode = node.nodeType === 3 && node.nodeName === '#text';
    if (isTextNode) {
        node.nodeValue = node.nodeValue.replace(/^\n/, '');
    }
}
// Re-import Ghost's own gallery cards: <figure class="kg-gallery-card">
// becomes a gallery card with one payload entry per contained <img>.
const kgGalleryCardToCard = (node, builder, {
    addSection,
    nodeFinished
}) => {
    if (node.nodeType !== 1 || node.tagName !== 'FIGURE') {
        return;
    }
    if (!node.className.match(/kg-gallery-card/)) {
        return;
    }
    let payload = {};
    let imgs = Array.from(node.querySelectorAll('img')); // Process nodes into the payload

    payload.images = imgs.map(_readGalleryImageFromNode);

    _readFigCaptionFromNode(node, payload);

    let cardSection = builder.createCardSection('gallery', payload);
    addSection(cardSection);
    nodeFinished();
};
// Convert Medium "graf" galleries into a gallery card. Medium splits one
// gallery over several consecutive divs (each with data-paragraph-count),
// so sibling divs are merged into a single card.
function grafGalleryToCard(node, builder, {
    addSection,
    nodeFinished
}) {
    function isGrafGallery(node) {
        return node.nodeType === 1 && node.tagName === 'DIV' && node.dataset && node.dataset.paragraphCount && node.querySelectorAll('img').length > 0;
    }

    if (!isGrafGallery(node)) {
        return;
    }
    let payload = {}; // These galleries exist in multiple divs. Read the images and caption from the first one...

    let imgs = Array.from(node.querySelectorAll('img'));

    _readFigCaptionFromNode(node, payload); // ...and then iterate over any remaining divs until we run out of matches

    let nextNode = node.nextSibling;

    while (nextNode && isGrafGallery(nextNode)) {
        let currentNode = nextNode;
        imgs = imgs.concat(Array.from(currentNode.querySelectorAll('img')));

        _readFigCaptionFromNode(currentNode, payload);

        nextNode = currentNode.nextSibling; // remove nodes as we go so that they don't go through the parser

        currentNode.remove();
    } // Process nodes into the payload

    payload.images = imgs.map(_readGalleryImageFromNode);
    let cardSection = builder.createCardSection('gallery', payload);
    addSection(cardSection);
    nodeFinished();
}
// Convert <figure><img>…</figure> into an image card, preserving wide/full
// width hints from Ghost (kg-width-*) or Medium (graf--layout*) classes.
function figureToImageCard(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'FIGURE') {
        return;
    }
    let img = node.querySelector('img');
    let kgClass = node.className.match(/kg-width-(wide|full)/);
    let grafClass = node.className.match(/graf--layout(FillWidth|OutsetCenter)/);

    if (!img) {
        return;
    }
    let payload = {
        src: img.src,
        alt: img.alt,
        title: img.title
    };

    // Map layout classes onto the card's width: kg classes carry the value
    // directly; Medium's FillWidth=full, OutsetCenter=wide.
    if (kgClass) {
        payload.cardWidth = kgClass[1];
    } else if (grafClass) {
        payload.cardWidth = grafClass[1] === 'FillWidth' ? 'full' : 'wide';
    }

    _readFigCaptionFromNode(node, payload);

    let cardSection = builder.createCardSection('image', payload);
    addSection(cardSection);
    nodeFinished();
}
// Bare <img> elements (outside a figure) become image cards.
function imgToCard(node, builder, {
    addSection,
    nodeFinished
}) {
    const isImage = node.nodeType === 1 && node.tagName === 'IMG';
    if (!isImage) {
        return;
    }
    const payload = {
        src: node.src,
        alt: node.alt,
        title: node.title
    };
    addSection(builder.createCardSection('image', payload));
    nodeFinished();
}
// Horizontal rules map to the payload-less "hr" card.
function hrToCard(node, builder, {
    addSection,
    nodeFinished
}) {
    const isRule = node.nodeType === 1 && node.tagName === 'HR';
    if (!isRule) {
        return;
    }
    addSection(builder.createCardSection('hr'));
    nodeFinished();
}
// Convert <figure><iframe>…</figure> into an embed card, keeping the whole
// figure's HTML as the embed markup and the iframe src as the canonical url.
function figureIframeToEmbedCard(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'FIGURE') {
        return;
    }
    let iframe = node.querySelector('iframe');

    if (!iframe) {
        return;
    }
    let src = iframe.src; // If we don't have a src, or it's not an absolute URL, we can't handle this

    if (!src || !src.match(/^https?:\/\//i)) {
        return;
    }
    let payload = {
        url: src
    };

    _readFigCaptionFromNode(node, payload);

    // The figcaption has already been removed, so innerHTML is the embed only.
    payload.html = node.innerHTML;
    let cardSection = builder.createCardSection('embed', payload);
    addSection(cardSection);
    nodeFinished();
}
// Convert figures wrapping a blockquote+link (e.g. Twitter-style embeds)
// into an embed card pointing at the link's absolute URL.
function figureBlockquoteToEmbedCard(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'FIGURE') {
        return;
    }
    let blockquote = node.querySelector('blockquote');
    let link = node.querySelector('a');

    if (!blockquote || !link) {
        return;
    }
    let url = link.href; // If we don't have a url, or it's not an absolute URL, we can't handle this

    if (!url || !url.match(/^https?:\/\//i)) {
        return;
    }
    let payload = {
        url: url
    };

    _readFigCaptionFromNode(node, payload);

    // Caption element is gone by now; keep the remaining markup as embed html.
    payload.html = node.innerHTML;
    let cardSection = builder.createCardSection('embed', payload);
    addSection(cardSection);
    nodeFinished();
}
// Convert <figure><pre><code>…</code></pre><figcaption>…</figcaption></figure>
// into a code card; captionless pre/code pairs are left for preCodeToCard.
function figureToCodeCard(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'FIGURE') {
        return;
    }
    let pre = node.querySelector('pre'); // If this figure doesn't have a pre tag in it

    if (!pre) {
        return;
    }
    let code = pre.querySelector('code');
    let figcaption = node.querySelector('figcaption'); // if there's no caption the preCodeToCard plugin will pick it up instead

    if (!code || !figcaption) {
        return;
    }
    let payload = {
        code: code.textContent
    };

    _readFigCaptionFromNode(node, payload);

    // Language detection: class="language-x" or "lang-x" on <pre> or <code>;
    // <pre>'s class wins when both match.
    let preClass = pre.getAttribute('class') || '';
    let codeClass = code.getAttribute('class') || '';
    let langRegex = /lang(?:uage)?-(.*?)(?:\s|$)/i;
    let languageMatches = preClass.match(langRegex) || codeClass.match(langRegex);

    if (languageMatches) {
        payload.language = languageMatches[1].toLowerCase();
    }

    let cardSection = builder.createCardSection('code', payload);
    addSection(cardSection);
    nodeFinished();
}
// Convert a bare <pre><code>…</code></pre> (first child must be <code>)
// into a code card, detecting a language from "language-x"/"lang-x" classes.
function preCodeToCard(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'PRE') {
        return;
    }
    const [codeElement] = node.children;
    if (!codeElement || codeElement.tagName !== 'CODE') {
        return;
    }
    const payload = {
        code: codeElement.textContent
    };
    const langRegex = /lang(?:uage)?-(.*?)(?:\s|$)/i;
    // <pre>'s class takes precedence over <code>'s when both carry a language.
    const languageMatches =
        (node.getAttribute('class') || '').match(langRegex) ||
        (codeElement.getAttribute('class') || '').match(langRegex);
    if (languageMatches) {
        payload.language = languageMatches[1].toLowerCase();
    }
    addSection(builder.createCardSection('code', payload));
    nodeFinished();
}
// Figures embedding a GitHub Gist <script> become html cards carrying the
// script tag verbatim; any other script src is ignored.
function figureScriptToHtmlCard(node, builder, {
    addSection,
    nodeFinished
}) {
    if (node.nodeType !== 1 || node.tagName !== 'FIGURE') {
        return;
    }
    let script = node.querySelector('script');

    if (!script || !script.src.match(/^https:\/\/gist\.github\.com/)) {
        return;
    }
    let payload = {
        html: script.outerHTML
    };
    let cardSection = builder.createCardSection('html', payload);
    addSection(cardSection);
    nodeFinished();
}
return [mixtapeEmbed, kgHtmlCardToCard, brToSoftBreakAtom, removeLeadingNewline, kgGalleryCardToCard, figureBlockquoteToEmbedCard, // I think these can contain images
grafGalleryToCard, figureToImageCard, imgToCard, hrToCard, figureToCodeCard, preCodeToCard, figureIframeToEmbedCard, figureScriptToHtmlCard];
}
export { createParserPlugins };
//# sourceMappingURL=parser-plugins.js.map
|
# Credits: @mrismanaziz
# Thanks To @tofik_dn || https://github.com/tofikdn
# FROM Zee-Userbot <https://github.com/kykoubot/Zee-Userbot>
# t.me/Dbzea & t.me/Storezeastore
from pytgcalls import StreamType
from pytgcalls.types import Update
from pytgcalls.types.input_stream import AudioPiped, AudioVideoPiped
from pytgcalls.types.input_stream.quality import (
HighQualityAudio,
HighQualityVideo,
LowQualityVideo,
MediumQualityVideo,
)
from telethon.tl import types
from telethon.utils import get_display_name
from youtubesearchpython import VideosSearch
from userbot import CMD_HANDLER as cmd
from userbot import CMD_HELP
from userbot import PLAY_PIC as fotoplay
from userbot import QUEUE_PIC as ngantri
from userbot import call_py
from userbot.utils import bash, edit_delete, edit_or_reply, man_cmd
from userbot.utils.chattitle import CHAT_TITLE
from userbot.utils.queues.queues import (
QUEUE,
add_to_queue,
clear_queue,
get_queue,
pop_an_item,
)
from userbot.utils.thumbnail import gen_thumb
def vcmention(user):
    """Return a Markdown mention link for *user*; plain display name for
    non-User entities (channels, chats)."""
    name = get_display_name(user)
    if isinstance(user, types.User):
        return f"[{name}](tg://user?id={user.id})"
    return name
def ytsearch(query: str):
    """Search YouTube for *query* and return the top hit as
    ``[title, link, duration, thumbnail_url]``, or 0 on any failure."""
    try:
        hit = VideosSearch(query, limit=1).result()["result"][0]
        return [
            hit["title"],
            hit["link"],
            hit["duration"],
            hit["thumbnails"][0]["url"],
        ]
    except Exception as err:
        # Callers test for the sentinel 0, so swallow and log everything.
        print(err)
        return 0
async def ytdl(format: str, link: str):
    """Resolve a direct stream URL for *link* via ``yt-dlp -g``.

    Returns ``(1, url)`` on success or ``(0, stderr_text)`` on failure.
    """
    import shlex  # local import: only needed here

    # SECURITY FIX: the URL was interpolated unquoted into a shell command,
    # allowing shell metacharacters in a crafted link to inject commands.
    # shlex.quote() makes the argument inert; behavior for normal URLs is
    # unchanged.
    stdout, stderr = await bash(f'yt-dlp -g -f "{format}" {shlex.quote(link)}')
    if stdout:
        # yt-dlp may print several URLs (video+audio); the first is used.
        return 1, stdout.split("\n")[0]
    return 0, stderr
async def skip_item(chat_id: int, x: int):
    """Remove queue entry *x* for *chat_id*; return its song name, or 0
    when the chat has no queue or the index is invalid."""
    if chat_id not in QUEUE:
        return 0
    queue = get_queue(chat_id)
    try:
        name = queue[x][0]
        queue.pop(x)
    except Exception as err:
        print(err)
        return 0
    return name
async def skip_current_song(chat_id: int):
    """Advance the voice-chat queue for *chat_id* to the next track.

    Returns 0 when the chat has no queue, 1 when the queue is exhausted
    (the call is left and the queue cleared), otherwise a list
    ``[songname, link, stream_kind]`` for the track now streaming.
    """
    if chat_id not in QUEUE:
        return 0
    chat_queue = get_queue(chat_id)
    if len(chat_queue) == 1:
        # Nothing left after the current track: leave and reset.
        await call_py.leave_group_call(chat_id)
        clear_queue(chat_id)
        return 1

    # Queue entries are (songname, stream_url, page_link, kind, resolution).
    entry = chat_queue[1]
    songname = entry[0]
    url = entry[1]
    link = entry[2]
    stream_kind = entry[3]  # renamed from `type` (shadowed the builtin)
    resolution = entry[4]

    if stream_kind == "Audio":
        await call_py.change_stream(
            chat_id,
            AudioPiped(
                url,
                HighQualityAudio(),
            ),
        )
    elif stream_kind == "Video":
        # BUG FIX: previously `hm` was only assigned for 720/480/360, so any
        # other resolution raised NameError; default to high quality.
        quality_map = {
            720: HighQualityVideo,
            480: MediumQualityVideo,
            360: LowQualityVideo,
        }
        hm = quality_map.get(resolution, HighQualityVideo)()
        await call_py.change_stream(
            chat_id, AudioVideoPiped(url, HighQualityAudio(), hm)
        )
    pop_an_item(chat_id)
    return [songname, link, stream_kind]
# NOTE(review): the pattern should be a raw string (r"play...") — "\s"
# happens to work in a normal string but is fragile; left unchanged here.
@man_cmd(pattern="play(?:\s|$)([\s\S]*)")
async def vc_play(event):
    """Play audio in the current group voice chat.

    Three paths:
    - neither a title argument nor a replied audio/voice message: prompt;
    - a title (or a reply without audio/voice): search YouTube and stream;
    - a replied audio/voice message: download the media and stream the file.
    When the chat already has an active QUEUE, the track is enqueued instead
    of played immediately.
    """
    title = event.pattern_match.group(1)
    replied = await event.get_reply_message()
    sender = await event.get_sender()
    chat = await event.get_chat()
    chat_id = event.chat_id
    from_user = vcmention(event.sender)
    # Nothing searchable and nothing playable in the reply: ask for a title.
    if (
        replied
        and not replied.audio
        and not replied.voice
        and not title
        or not replied
        and not title
    ):
        return await edit_or_reply(event, "**Silahkan Masukan Judul Lagu**")
    elif replied and not replied.audio and not replied.voice or not replied:
        # Search path: look the title up on YouTube.
        botman = await edit_or_reply(event, "`Searching...`")
        query = event.text.split(maxsplit=1)[1]
        search = ytsearch(query)
        if search == 0:
            # ytsearch() returns the sentinel 0 on failure / no results.
            await botman.edit(
                "**Tidak Dapat Menemukan Lagu** Coba cari dengan Judul yang Lebih Spesifik"
            )
        else:
            # search == [title, link, duration, thumbnail_url]
            songname = search[0]
            title = search[0]
            url = search[1]
            duration = search[2]
            thumbnail = search[3]
            userid = sender.id
            titlegc = chat.title
            ctitle = await CHAT_TITLE(titlegc)
            thumb = await gen_thumb(thumbnail, title, userid, ctitle)
            format = "best[height<=?720][width<=?1280]"
            hm, ytlink = await ytdl(format, url)
            if hm == 0:
                # ytdl failed; ytlink carries the stderr text here.
                await botman.edit(f"`{ytlink}`")
            elif chat_id in QUEUE:
                # Something is already playing: enqueue and report position.
                pos = add_to_queue(chat_id, songname, ytlink, url, "Audio", 0)
                caption = f"💡 **Lagu Ditambahkan Ke antrian »** `#{pos}`\n\n**🏷 Judul:** [{songname}]({url})\n**⏱ Durasi:** `{duration}`\n🎧 **Atas permintaan:** {from_user}"
                await botman.delete()
                await event.client.send_file(
                    chat_id, thumb, caption=caption, reply_to=event.reply_to_msg_id
                )
            else:
                try:
                    await call_py.join_group_call(
                        chat_id,
                        AudioPiped(
                            ytlink,
                            HighQualityAudio(),
                        ),
                        stream_type=StreamType().pulse_stream,
                    )
                    add_to_queue(chat_id, songname, ytlink, url, "Audio", 0)
                    caption = f"🏷 **Judul:** [{songname}]({url})\n**⏱ Durasi:** `{duration}`\n💡 **Status:** `Sedang Memutar`\n🎧 **Atas permintaan:** {from_user}"
                    await botman.delete()
                    await event.client.send_file(
                        chat_id, thumb, caption=caption, reply_to=event.reply_to_msg_id
                    )
                except Exception as ep:
                    # Joining the call failed: reset queue state and surface it.
                    clear_queue(chat_id)
                    await botman.edit(f"`{ep}`")
    else:
        # Reply path: stream the replied audio/voice file directly.
        botman = await edit_or_reply(event, "📥 **Sedang Mendownload**")
        dl = await replied.download_media()
        link = f"https://t.me/c/{chat.id}/{event.reply_to_msg_id}"
        if replied.audio:
            songname = "Telegram Music Player"
        elif replied.voice:
            songname = "Voice Note"
        if chat_id in QUEUE:
            pos = add_to_queue(chat_id, songname, dl, link, "Audio", 0)
            caption = f"💡 **Lagu Ditambahkan Ke antrian »** `#{pos}`\n\n**🏷 Judul:** [{songname}]({link})\n**👥 Chat ID:** `{chat_id}`\n🎧 **Atas permintaan:** {from_user}"
            await event.client.send_file(
                chat_id, ngantri, caption=caption, reply_to=event.reply_to_msg_id
            )
            await botman.delete()
        else:
            try:
                await call_py.join_group_call(
                    chat_id,
                    AudioPiped(
                        dl,
                        HighQualityAudio(),
                    ),
                    stream_type=StreamType().pulse_stream,
                )
                add_to_queue(chat_id, songname, dl, link, "Audio", 0)
                caption = f"🏷 **Judul:** [{songname}]({link})\n**👥 Chat ID:** `{chat_id}`\n💡 **Status:** `Sedang Memutar Lagu`\n🎧 **Atas permintaan:** {from_user}"
                await event.client.send_file(
                    chat_id, fotoplay, caption=caption, reply_to=event.reply_to_msg_id
                )
                await botman.delete()
            except Exception as ep:
                clear_queue(chat_id)
                await botman.edit(f"`{ep}`")
@man_cmd(pattern=r"vplay(?:\s|$)([\s\S]*)")
async def vc_vplay(event):
    """Stream a video in this chat's group voice call.

    Input modes:
      * text title / YouTube link only   -> YouTube search, stream best <=720p
      * reply to a video/document        -> download and stream the media;
                                            optional numeric resolution arg
      * reply to non-media + text title  -> YouTube search

    When a stream is already active for the chat, the request is appended to
    QUEUE instead of interrupting playback.
    """
    title = event.pattern_match.group(1)
    replied = await event.get_reply_message()
    sender = await event.get_sender()
    userid = sender.id
    chat = await event.get_chat()
    titlegc = chat.title
    chat_id = event.chat_id
    from_user = vcmention(event.sender)
    # Nothing to play: neither a media reply nor a search title was given.
    if (
        replied
        and not replied.video
        and not replied.document
        and not title
        or not replied
        and not title
    ):
        return await edit_or_reply(event, "**Silahkan Masukan Judul Video**")
    if replied and not replied.video and not replied.document:
        # Replied to a non-media message but a title was given: search YouTube.
        xnxx = await edit_or_reply(event, "`Searching...`")
        query = event.text.split(maxsplit=1)[1]
        search = ytsearch(query)
        RESOLUSI = 720
        hmmm = HighQualityVideo()
        if search == 0:
            await xnxx.edit(
                "**Tidak Dapat Menemukan Video** Coba cari dengan Judul yang Lebih Spesifik"
            )
        else:
            songname = search[0]
            title = search[0]
            url = search[1]
            duration = search[2]
            thumbnail = search[3]
            ctitle = await CHAT_TITLE(titlegc)
            thumb = await gen_thumb(thumbnail, title, userid, ctitle)
            format = "best[height<=?720][width<=?1280]"
            hm, ytlink = await ytdl(format, url)
            if hm == 0:
                # ytdl() signals failure with hm == 0; ytlink then carries the
                # error text.
                await xnxx.edit(f"`{ytlink}`")
            elif chat_id in QUEUE:
                pos = add_to_queue(chat_id, songname, ytlink, url, "Video", RESOLUSI)
                caption = f"💡 **Video Ditambahkan Ke antrian »** `#{pos}`\n\n**🏷 Judul:** [{songname}]({url})\n**⏱ Durasi:** `{duration}`\n🎧 **Atas permintaan:** {from_user}"
                await xnxx.delete()
                await event.client.send_file(
                    chat_id, thumb, caption=caption, reply_to=event.reply_to_msg_id
                )
            else:
                try:
                    await call_py.join_group_call(
                        chat_id,
                        AudioVideoPiped(
                            ytlink,
                            HighQualityAudio(),
                            hmmm,
                        ),
                        stream_type=StreamType().pulse_stream,
                    )
                    add_to_queue(chat_id, songname, ytlink, url, "Video", RESOLUSI)
                    await xnxx.edit(
                        f"**🏷 Judul:** [{songname}]({url})\n**⏱ Durasi:** `{duration}`\n💡 **Status:** `Sedang Memutar Video`\n🎧 **Atas permintaan:** {from_user}",
                        link_preview=False,
                    )
                except Exception as ep:
                    clear_queue(chat_id)
                    await xnxx.edit(f"`{ep}`")
    elif replied:
        # Reply carries a video/document: download it and stream the file.
        xnxx = await edit_or_reply(event, "📥 **Sedang Mendownload**")
        dl = await replied.download_media()
        link = f"https://t.me/c/{chat.id}/{event.reply_to_msg_id}"
        if len(event.text.split()) < 2:
            RESOLUSI = 720
        else:
            pq = event.text.split(maxsplit=1)[1]
            # BUG FIX: int(pq) raised ValueError for a non-numeric argument and
            # crashed the handler; fall back to 720p instead.
            RESOLUSI = int(pq) if pq.isdigit() else 720
        if replied.video or replied.document:
            songname = "Telegram Video Player"
        if chat_id in QUEUE:
            pos = add_to_queue(chat_id, songname, dl, link, "Video", RESOLUSI)
            caption = f"💡 **Video Ditambahkan Ke antrian »** `#{pos}`\n\n**🏷 Judul:** [{songname}]({link})\n**👥 Chat ID:** `{chat_id}`\n🎧 **Atas permintaan:** {from_user}"
            await event.client.send_file(
                chat_id, ngantri, caption=caption, reply_to=event.reply_to_msg_id
            )
            await xnxx.delete()
        else:
            if RESOLUSI == 360:
                hmmm = LowQualityVideo()
            elif RESOLUSI == 480:
                hmmm = MediumQualityVideo()
            else:
                # BUG FIX: any value other than 360/480/720 previously left
                # `hmmm` unbound (NameError); default to 720p quality.
                hmmm = HighQualityVideo()
            try:
                await call_py.join_group_call(
                    chat_id,
                    AudioVideoPiped(
                        dl,
                        HighQualityAudio(),
                        hmmm,
                    ),
                    stream_type=StreamType().pulse_stream,
                )
                add_to_queue(chat_id, songname, dl, link, "Video", RESOLUSI)
                caption = f"🏷 **Judul:** [{songname}]({link})\n**👥 Chat ID:** `{chat_id}`\n💡 **Status:** `Sedang Memutar Video`\n🎧 **Atas permintaan:** {from_user}"
                await xnxx.delete()
                await event.client.send_file(
                    chat_id, fotoplay, caption=caption, reply_to=event.reply_to_msg_id
                )
            except Exception as ep:
                clear_queue(chat_id)
                await xnxx.edit(f"`{ep}`")
    else:
        # No reply at all: plain title/link, search YouTube.
        xnxx = await edit_or_reply(event, "`Searching...`")
        query = event.text.split(maxsplit=1)[1]
        search = ytsearch(query)
        RESOLUSI = 720
        hmmm = HighQualityVideo()
        if search == 0:
            await xnxx.edit("**Tidak Menemukan Video untuk Keyword yang Diberikan**")
        else:
            songname = search[0]
            title = search[0]
            url = search[1]
            duration = search[2]
            thumbnail = search[3]
            ctitle = await CHAT_TITLE(titlegc)
            thumb = await gen_thumb(thumbnail, title, userid, ctitle)
            format = "best[height<=?720][width<=?1280]"
            hm, ytlink = await ytdl(format, url)
            if hm == 0:
                await xnxx.edit(f"`{ytlink}`")
            elif chat_id in QUEUE:
                pos = add_to_queue(chat_id, songname, ytlink, url, "Video", RESOLUSI)
                caption = f"💡 **Video Ditambahkan Ke antrian »** `#{pos}`\n\n🏷 **Judul:** [{songname}]({url})\n**⏱ Durasi:** `{duration}`\n🎧 **Atas permintaan:** {from_user}"
                await xnxx.delete()
                await event.client.send_file(
                    chat_id, thumb, caption=caption, reply_to=event.reply_to_msg_id
                )
            else:
                try:
                    await call_py.join_group_call(
                        chat_id,
                        AudioVideoPiped(
                            ytlink,
                            HighQualityAudio(),
                            hmmm,
                        ),
                        stream_type=StreamType().pulse_stream,
                    )
                    add_to_queue(chat_id, songname, ytlink, url, "Video", RESOLUSI)
                    caption = f"🏷 **Judul:** [{songname}]({url})\n**⏱ Durasi:** `{duration}`\n💡 **Status:** `Sedang Memutar Video`\n🎧 **Atas permintaan:** {from_user}"
                    await xnxx.delete()
                    await event.client.send_file(
                        chat_id, thumb, caption=caption, reply_to=event.reply_to_msg_id
                    )
                except Exception as ep:
                    clear_queue(chat_id)
                    await xnxx.edit(f"`{ep}`")
@man_cmd(pattern="end$")
async def vc_end(event):
    """Stop the active voice-chat stream for this chat and clear its queue."""
    chat_id = event.chat_id
    if chat_id not in QUEUE:
        await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
        return
    try:
        await call_py.leave_group_call(chat_id)
        clear_queue(chat_id)
        await edit_or_reply(event, "**Menghentikan Streaming**")
    except Exception as e:
        await edit_delete(event, f"**ERROR:** `{e}`")
@man_cmd(pattern=r"skip(?:\s|$)([\s\S]*)")
async def vc_skip(event):
    """Skip the current track, or delete specific queue positions.

    * ``skip``         -> advance to the next queued track
    * ``skip 2 3 ...`` -> remove those positions from the queue (position 0,
                          the currently playing track, is never removed here)
    """
    chat_id = event.chat_id
    if len(event.text.split()) < 2:
        op = await skip_current_song(chat_id)
        if op == 0:
            await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
        elif op == 1:
            await edit_delete(event, "antrian kosong, meninggalkan obrolan suara", 10)
        else:
            await edit_or_reply(
                event,
                f"**⏭ Melewati Lagu**\n**🎧 Sekarang Memutar** - [{op[0]}]({op[1]})",
                link_preview=False,
            )
    else:
        skip = event.text.split(maxsplit=1)[1]
        DELQUE = "**Menghapus Lagu Berikut Dari Antrian:**"
        if chat_id in QUEUE:
            # Remove highest positions first so earlier removals do not shift
            # the indices of the remaining requested positions.
            items = [int(x) for x in skip.split(" ") if x.isdigit()]
            items.sort(reverse=True)
            for x in items:
                if x != 0:
                    hm = await skip_item(chat_id, x)
                    if hm != 0:
                        DELQUE = DELQUE + "\n" + f"**#{x}** - {hm}"
            await event.edit(DELQUE)
        else:
            # BUG FIX: this branch previously ended silently when nothing was
            # streaming; give the same feedback as the other handlers.
            await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
@man_cmd(pattern="pause$")
async def vc_pause(event):
    """Pause the active voice-chat stream for this chat."""
    chat_id = event.chat_id
    if chat_id not in QUEUE:
        await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
        return
    try:
        await call_py.pause_stream(chat_id)
        await edit_or_reply(event, "**Streaming Dijeda**")
    except Exception as e:
        await edit_delete(event, f"**ERROR:** `{e}`")
@man_cmd(pattern="resume$")
async def vc_resume(event):
    """Resume a previously paused voice-chat stream for this chat."""
    chat_id = event.chat_id
    if chat_id not in QUEUE:
        await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
        return
    try:
        await call_py.resume_stream(chat_id)
        await edit_or_reply(event, "**Streaming Dilanjutkan**")
    except Exception as e:
        await edit_or_reply(event, f"**ERROR:** `{e}`")
@man_cmd(pattern=r"volume(?: |$)(.*)")
async def vc_volume(event):
    """Change the group-call volume; the userbot account must be an admin."""
    level = event.pattern_match.group(1)
    me = await event.client.get_me()
    chat = await event.get_chat()
    chat_id = event.chat_id
    # The Telegram API only lets admins/creators change the call volume.
    if not (chat.admin_rights or chat.creator):
        return await edit_delete(event, f"**Maaf {me.first_name} Bukan Admin 👮**", 30)
    if chat_id not in QUEUE:
        await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
        return
    try:
        await call_py.change_volume_call(chat_id, volume=int(level))
        await edit_or_reply(
            event, f"**Berhasil Mengubah Volume Menjadi** `{level}%`"
        )
    except Exception as e:
        await edit_delete(event, f"**ERROR:** `{e}`", 30)
@man_cmd(pattern="playlist$")
async def vc_playlist(event):
    """Show the currently playing track and any queued tracks for this chat."""
    chat_id = event.chat_id
    if chat_id not in QUEUE:
        await edit_delete(event, "**Tidak Sedang Memutar Streaming**")
        return
    chat_queue = get_queue(chat_id)
    current = chat_queue[0]
    if len(chat_queue) == 1:
        await edit_or_reply(
            event,
            f"**🎧 Sedang Memutar:**\n• [{current[0]}]({current[2]}) | `{current[3]}`",
            link_preview=False,
        )
        return
    lines = [
        f"**🎧 Sedang Memutar:**\n**• [{current[0]}]({current[2]})** | `{current[3]}` \n\n**• Daftar Putar:**"
    ]
    for position, entry in enumerate(chat_queue[1:], start=1):
        lines.append(f"**#{position}** - [{entry[0]}]({entry[2]}) | `{entry[3]}`")
    await edit_or_reply(event, "\n".join(lines), link_preview=False)
@call_py.on_stream_end()
async def stream_end_handler(_, u: Update):
    """Advance playback to the next queued track when a stream finishes.

    BUG FIX: removed a leftover debug ``print(chat_id)`` that wrote to stdout
    on every track change.
    """
    await skip_current_song(u.chat_id)
@call_py.on_closed_voice_chat()
async def closedvc(_, chat_id: int):
    """Drop this chat's queued tracks once its voice chat is closed."""
    if chat_id not in QUEUE:
        return
    clear_queue(chat_id)
@call_py.on_left()
async def leftvc(_, chat_id: int):
    """Drop this chat's queued tracks after the account leaves the call."""
    if chat_id not in QUEUE:
        return
    clear_queue(chat_id)
@call_py.on_kicked()
async def kickedvc(_, chat_id: int):
    """Drop this chat's queued tracks after being kicked from the call."""
    if chat_id not in QUEUE:
        return
    clear_queue(chat_id)
CMD_HELP.update(
{
"vcplugin": f"**Plugin : **`vcplugin`\
\n\n • **Syntax :** `{cmd}play` <Judul Lagu/Link YT>\
\n • **Function : **Untuk Memutar Lagu di voice chat group dengan akun kamu\
\n\n • **Syntax :** `{cmd}vplay` <Judul Video/Link YT>\
\n • **Function : **Untuk Memutar Video di voice chat group dengan akun kamu\
\n\n • **Syntax :** `{cmd}end`\
\n • **Function : **Untuk Memberhentikan video/lagu yang sedang putar di voice chat group\
\n\n • **Syntax :** `{cmd}skip`\
\n • **Function : **Untuk Melewati video/lagu yang sedang di putar\
\n\n • **Syntax :** `{cmd}pause`\
\n • **Function : **Untuk memberhentikan video/lagu yang sedang diputar\
\n\n • **Syntax :** `{cmd}resume`\
\n • **Function : **Untuk melanjutkan pemutaran video/lagu yang sedang diputar\
\n\n • **Syntax :** `{cmd}volume` 1-200\
\n • **Function : **Untuk mengubah volume (Membutuhkan Hak admin)\
\n\n • **Syntax :** `{cmd}playlist`\
\n • **Function : **Untuk menampilkan daftar putar Lagu/Video\
"
}
)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Part of pymzml test cases
"""
# import sys
# import os
# # import PyNumpress
# import pymzml
# import pymzml.decoder as decoder
# import time
# import unittest
# import numpy as np
# # import PyNumpress as pnp
# import zlib
# from base64 import b64encode as b64enc
# import test_file_paths
# class DecoderTest(unittest.TestCase):
# def assertPeaksIdentical(self, peaks1, peaks2, msg=None):
# self.assertEqual(len(peaks1), len(peaks2))#, msg='List have different number of peaks!')
# for x in range(len(peaks1)):
# self.assertCountEqual(peaks1[x], peaks2[x], msg=msg)
# def setUp(self):
# self.paths = test_file_paths.paths
# self.Decoder = pymzml.Decoder
# self.Run = pymzml.run.Reader(self.paths[2])
# def test_decode_numpress(self):
# arr = np.asarray([1,2,3], dtype=np.float64)
# dec = pnp.MSNumpress([])
# enc_np = dec.encode_linear(arr, dec.optimal_linear_fixed_point(arr))
# enc_np_zlib = zlib.compress(enc_np)
# enc_np_zlib_b64 = b64enc(enc_np_zlib)
# comp = ['zlib', 'MS-Numpress linear prediction compression']
# d_type, decoded = decoder._decode(enc_np_zlib_b64, comp, 3, '32-bit float', 'i')
# self.assertCountEqual(arr, decoded)
# def test_decode_numpress(self):
# test_array = np.asarray([1,2,3], dtype=np.float64)
# MSNumpress = PyNumpress.MSNumpress([])
# nump_enc = MSNumpress.encode_linear(test_array, MSNumpress.optimal_linear_fixed_point(test_array))
# zlib_nump_enc = zlib.compress(nump_enc)
# b64_zlib_nump_enc =b64enc( zlib_nump_enc )
# d_type, arr = decoder._decode(b64_zlib_nump_enc, ['zlib', 'ms-np-slof'], 3, '64-bit float', 'i')
# self.assertIsNotNone(len(arr))
# self.assertIsInstance(arr, list)
# def test_decode_32_bit_no_compression(self):
# b64_array = 'cgDIQjgByEL+AchCxQLIQiGL7kIjjO5CJY3uQieO7kIpj+5CK5DuQi2R7kIvku5CMpPuQjeU7kI5le5CO5buQj2X7kK6QgJDTUMCQ+FDAkN0RAJDB0UCQ5pFAkMuRgJDwUYCQ1RHAkPoRwJDe0gCQw9JAkOiSQJDj/YGQyr3BkPF9wZDYfgGQ/z4BkOX+QZDM/oGQ876BkNp+wZDBfwGQ6L8BkM9/QZD2P0GQ3T+BkPmBRVDmgYVQ04HFUMCCBVDtggVQ2oJFUMfChVD0woVQ4cLFUM7DBVD8AwVQ6QNFUNYDhVDDA8VQ8EPFUN1EBVDKREVQ/UFFkOrBhZDYQcWQxcIFkPNCBZDgwkWQzkKFkPvChZDpQsWQ1sMFkMRDRZDxw0WQ34OFkM0DxZDw6gZQ4CpGUM9qhlD+aoZQ7arGUNzrBlDL60ZQ+ytGUOprhlDZq8ZQyKwGUPfsBlDnLEZQ0iiGkMGoxpDxaMaQ4OkGkNCpRpDAKYaQ7+mGkN9pxpDPKgaQ/+oGkO9qRpDfKoaQzqrGkO0o0dDy6RHQ+OlR0P6pkdDEahHQympR0NAqkdDWKtHQ2+sR0OHrUdDnq5HQ7avR0PNsEdDaT1RQ5U+UUPAP1FD7EBRQxhCUUNEQ1FDcERRQ5xFUUPHRlFD9EdRQyBJUUNLSlFDd0tRQ3p0bEPidWxDS3dsQ7N4bEMbemxDg3tsQ+t8bENTfmxDvH9sQySBbEONgmxD9YNsQ12FbEPGhmxDDn6EQ+R+hEO5f4RDj4CEQ2WBhEM6goRDEIOEQ+aDhEO7hIRDkYWEQ2eGhEM8h4RDEoiEQ5x6hUN0e4VDTHyFQyR9hUP9fYVD1X6FQ61/hUOFgIVDXYGFQzWChUMNg4VD5YOFQ72EhUOVhYVDlnaGQ3B3hkNLeIZDJXmGQwB6hkPaeoZDtHuGQ498hkNpfYZDRH6GQx5/hkP5f4ZD04CGQ66BhkOIgoZDY4OGQz2EhkMM9oZD6PaGQ8T3hkOf+IZDe/mGQ1f6hkMy+4ZDDvyGQ+r8hkPF/YZDof6GQ33/hkNYAIdDNAGHQxACh0PrAodDG9CIQ/vQiEPb0YhDu9KIQ5vTiEN71IhDXNWIQzzWiEMc14hD/NeIQ9zYiEO82YhDnNqIQ33biENvQYxDV0KMQ0BDjEMpRIxDEUWMQ/pFjEPjRoxDy0eMQ7RIjEOdSYxDhkqMQ25LjENXTIxDT3+MQzmAjEMigYxDC4KMQ/WCjEPeg4xDx4SMQ7CFjEOahoxDg4eMQ2yIjENWiYxDP4qMQyiLjEMSjIxD+4yMQ+WNjEMhAI1DCwGNQ/YBjUPgAo1DywONQ7UEjUOgBY1DigaNQ3UHjUNgCI1DSgmNQzUKjUMfC41DCgyNQ/UMjUP0fY1D4H6NQ8x/jUO3gI1Do4GNQ4+CjUN7g41DZ4SNQ1KFjUM+ho1DKoeNQxaIjUMCiY1D7YmNQ9mKjUO6eY5DqXqOQ5d7jkOFfI5Dc32OQ2F+jkNQf45DPoCOQyyBjkMago5DCYOOQ/eDjkPlhI5D1IWOQ8KGjkOwh45Dn4iOQ3n6jkNo+45DWPyOQ0f9jkM3/o5DJv+OQxYAj0MFAY9D9QGPQ+QCj0PUA49DwwSPQ7IFj0OiBo9DkQePQ4EIj0P7f5VD+4CVQ/uBlUP7gpVD+4OVQ/uElUP7hZVD+4aVQ/uHlUP7iJVD/ImVQ/yKlUP8i5VD/IyVQ/yNlUP8jpVD/I+VQ4AtmkOMLppDmC+aQ6UwmkOxMZpDvTKaQ8kzmkPVNJpD4jWaQ+42mkP6N5pDBjmaQxM6mkMfO5pDbF3DQ+pew0NpYMND52HDQw=='
# d_type, arr = decoder._decode(b64_array, 'no compression', 343, '32-bit float', 'i')
# self.assertIsNotNone(len(arr))
# self.assertIsInstance(arr, np.ndarray)
# def test_pool_decode_TIC(self):
# """
# """
# spec = self.Run["TIC"]
# spec2 = self.Run["TIC"]
# paramsMZ = spec._get_encoding_parameters('time array')
# paramsMZ += ('time',)
# paramsI = spec._get_encoding_parameters('intensity array')
# paramsI += ('i',)
# s = time.time()
# self.Decoder.pool_decode([paramsMZ, paramsI], spec._register)
# t1 = time.time() - s
# peaks1 = spec.profile
# s = time.time()
# peaks2 = list(zip(spec.time, spec.i))
# t2 = time.time() - s
# self.assertPeaksIdentical(peaks1, peaks2)
# assert t1 < t2, 'parallel version is slower than normal version:\n' \
# 'Parallel took {0:.5f} seconds\n' \
# 'Normal took {1:.05f} seconds\n'.format(t1, t2)
# def test_pool_decode_spec4000(self):
# spec3 = self.Run[5]
# spec4 = self.Run[5]
# paramsMZ = spec3._get_encoding_parameters('m/z array')
# paramsMZ += ('mz',)
# paramsI = spec3._get_encoding_parameters('intensity array')
# paramsI += ('i',)
# s = time.time()
# self.Decoder.pool_decode([paramsMZ, paramsI], spec3._register)
# t1 = time.time() - s
# peaks3 = spec3.peaks
# s = time.time()
# peaks4 = list(zip(spec4.mz, spec4.i))
# t2 = time.time() - s
# self.assertPeaksIdentical(peaks3, peaks4)
# assert t1 < t2, 'parallel version is slower than normal version:\n' \
# 'Parallel took {0:.5f} seconds\n' \
# 'Normal took {1:.05f} seconds\n'.format(t1, t2)
# def test_big_parallel_array(self):
# arr = np.asarray([x for x in range(1000000)], dtype=np.float64)
# dec = pnp.MSNumpress([])
# enc_np = dec.encode_linear(arr, dec.optimal_linear_fixed_point(arr))
# enc_np_zlib = zlib.compress(enc_np)
# enc_np_zlib_b64 = b64enc(enc_np_zlib)
# comp = ['zlib', 'MS-Numpress linear prediction compression']
# params = [
# (enc_np_zlib_b64, comp, 100000, '32-bit float', 'i'),
# (enc_np_zlib_b64, comp, 100000, '32-bit float', 'mz')
# ]
# s = time.time()
# self.Decoder.pool_decode(params, lambda x,y :print(x,y))
# t1 = time.time() - s
# s = time.time()
# d_type, decoded = decoder._decode(*params[0])
# d_type, decoded = decoder._decode(*params[1])
# t2 = time.time() - s
# print(t1, t2)
# assert t1 < t2
if __name__ == "__main__":
    # BUG FIX: every test case above is commented out, including the
    # `import unittest` line, so running this file directly raised NameError.
    # Import locally so direct execution still works.
    import unittest

    unittest.main(verbosity=3)
|
from setuptools import setup, find_packages
# Runtime dependencies; unpinned, so any installed Django version is accepted.
install_requires = [
    'django',
]
# Release version; keep in sync with the tag in download_url below.
version = "0.3.1"
# Package metadata for the django-medusa fork; packages are auto-discovered.
setup(name='django-medusa-unstoppable',
      version=version,
      description='A Django static website generator. Fork of django-medusa',
      author='Tobias Schulmann',  # update this as needed
      author_email='tobiasschulmann@catalyst.net.nz',  # update this as needed
      url='https://github.com/GeoTob/django-medusa-unstoppable',
      download_url='https://github.com/GeoTob/django-medusa-unstoppable/releases/tag/0.3.1',
      packages=find_packages(),
      install_requires=install_requires,
      license='MIT',
      keywords='django static staticwebsite staticgenerator publishing',
      classifiers=["Development Status :: 3 - Alpha",
                   "License :: OSI Approved :: MIT License",
                   "Operating System :: OS Independent",
                   "Programming Language :: Python",
                   "Topic :: Software Development :: Libraries :: Python Modules"
                   ],
      )
|
/* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/ResponseKit.framework/ResponseKit
*/
// Private ResponseKit sentence classifier specialized for Simplified Chinese
// (zh_Hans_CN), subclassing the generic RKSentenceClassifier. This header was
// recovered from the binary by RuntimeBrowser, so only selectors are visible;
// the behavior notes below are inferences from the names — confirm against
// the implementation before relying on them.
@interface RKSentenceClassifier_zh_Hans_CN : RKSentenceClassifier
- (id)addSentenceTerminatorQuestion:(id)arg1; // presumably returns arg1 with a question-style terminator appended — confirm
- (id)alternativeConjunctions; // presumably returns candidate conjunctions for this locale — return type unknown from header
- (void)analyzeSentence;
- (id)classifySentence;
@end
|
/**
* @module ol/CollectionEventType
*/
/**
* @enum {string}
*/
const CollectionEventType = {
/**
* Triggered when an item is added to the collection.
* @event module:ol/Collection.CollectionEvent#add
* @api
*/
ADD: 'add',
/**
* Triggered when an item is removed from the collection.
* @event module:ol/Collection.CollectionEvent#remove
* @api
*/
REMOVE: 'remove',
};
export default CollectionEventType;
|
from typing import Text
class Tag(str):
    """A ``str`` subclass rendered with a leading colon.

    ``str(Tag('x'))`` is ``':x'`` while the underlying value stays ``'x'``
    for comparisons, hashing, and other ``str`` operations.
    """

    def __str__(self) -> Text:
        # Display form only; the stored value has no colon.
        return ":%s" % super().__str__()

    def __repr__(self) -> Text:
        # BUG FIX: str.__repr__() already quotes the value, so the original
        # "%s('%s')" wrapping produced doubled quotes like Tag(''x'').
        # Reuse the already-quoted form to get Tag('x').
        return "Tag(%s)" % super().__repr__()
|
/**
 * Delete report `reportId` on board 1 via the REST API.
 * Returns the server's `success` flag, or null when the response is not OK.
 * If the server redirected the request, the browser is navigated to the
 * redirect target before the body is read.
 */
export const removeAReportAPI = async (reportId) => {
  const endpoint = `/api/board/1/report/${reportId}`
  const response = await fetch(endpoint, {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
  })
  if (!response.ok) {
    return null
  }
  if (response.redirected) {
    location.href = response.url
  }
  const body = await response.json()
  return body.success
}
|
#import <Flutter/Flutter.h>
// iOS entry point for the qr_code_scanner2 Flutter plugin. Conforms to
// FlutterPlugin so the generated plugin registrant can register it with the
// engine; handlers live in the corresponding .m file (not visible here).
@interface QrCodeScanner2Plugin : NSObject<FlutterPlugin>
@end
|
import json
def lambda_handler(event, context):
    """Handle a Hasura event-trigger payload delivered via API Gateway.

    Expects ``event['body']`` to be a JSON string shaped like
    ``{'table': {'name': ...}, 'event': {'op': ..., 'data': {'old': ..., 'new': ...}}}``.

    Returns a 400 response when the body cannot be parsed, otherwise a 200
    response whose message describes the INSERT/UPDATE/DELETE on ``notes``.
    """
    try:
        body = json.loads(event['body'])
    # Narrowed from a bare except: missing 'body' (KeyError), a non-string
    # body (TypeError), or invalid JSON (json.JSONDecodeError is a ValueError).
    except (KeyError, TypeError, ValueError):
        return {
            "statusCode": 400,
            "body": json.dumps({'message': 'Unable to parse hasura event'})
        }
    message = 'Not able to process request'
    data = body['event']['data']
    table = body['table']['name']
    op = body['event']['op']
    if table == 'notes' and op == 'INSERT':
        message = 'New note {} inserted, with data: {}'.format(data['new']['id'], data['new']['note'])
    elif table == 'notes' and op == 'UPDATE':
        message = 'Note {} updated, with data: {}'.format(data['new']['id'], data['new']['note'])
    elif table == 'notes' and op == 'DELETE':
        # BUG FIX: the original compared body['table'] and body['op'] directly
        # (instead of body['table']['name'] / body['event']['op']), so DELETE
        # events never matched and fell through to the default message.
        message = 'Note {} deleted, with data: {}'.format(data['old']['id'], data['old']['note'])
    return {
        "statusCode": 200,
        "body": json.dumps({'message': message})
    }
|
import sys
import os

# BUG FIX: the original passed the literal string '__file__' to
# os.path.dirname, which returns '' and therefore resolved ROOT_DIR relative
# to the current working directory instead of this script's location.
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
# Make project-local packages (models, data, trainers, ...) importable.
sys.path.insert(0, ROOT_DIR)
import logging
import torch
from torch.utils.data import DataLoader, Subset, ConcatDataset
from models.mask_r_cnn_model import get_mask_r_cnn
from data.mask_r_cnn_dataset import PennFudanDataset
from trainers.mask_r_cnn_trainer import MaskRCNNTrainer
from logger.logger import main_run, default_log_config
from utils import collate_fn
import data.custom_transforms as t_custom

# default configuration file with hyperparameters
DEFAULT_CONFIG = 'train.json'
# Fixed seed so the random train/test split below is reproducible.
torch.manual_seed(0)
def main(config, args):
    """Train a Mask R-CNN model on the PennFudan pedestrian dataset.

    Builds two differently-augmented views of the dataset, concatenates them
    for training, holds out the last 50 shuffled samples for validation, and
    runs MaskRCNNTrainer with SGD and a StepLR schedule.

    Args:
        config: mapping of hyperparameters; keys read here are
            'resource_dir', 'lr', and 'epochs' (all with defaults).
        args: parsed CLI arguments; fields read here are resource_dir,
            save_dir, checkpoint, and change_lr.
    """
    # train on the GPU or on the CPU, if a GPU is not available
    device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
    # create an instance of logger
    logger = logging.getLogger(os.path.basename(__file__))
    # CLI-provided dataset location wins over the config value.
    if args.resource_dir is not None:
        resources_dir = args.resource_dir
    else:
        resources_dir = os.path.join(ROOT_DIR, 'resources', config.get('resource_dir', 'PennFudanPed'))
    images = os.path.join(resources_dir, 'PNGImages')
    masks = os.path.join(resources_dir, 'PedMasks')
    # Two augmentation pipelines: blur + random flip, and color jitter.
    transform_1 = t_custom.Compose([t_custom.GaussianSmoothingBbox([0.5, 1]),
                                    t_custom.ToTensor(),
                                    t_custom.RandomHorizontalFlip(0.5)])
    transform_2 = t_custom.Compose([t_custom.ColorJitterBbox(0.2, 0.2, 0.2, 0.2),
                                    t_custom.ToTensor()])
    dataset_1 = PennFudanDataset(root=resources_dir, data_paths=[images, masks], extensions=(('.png'),) * 2,
                                 transforms=transform_1)
    dataset_2 = PennFudanDataset(root=resources_dir, data_paths=[images, masks], extensions=(('.png'),) * 2,
                                 transforms=transform_2)
    dataset_test = PennFudanDataset(root=resources_dir, data_paths=[images, masks], extensions=(('.png'),) * 2,
                                    transforms=t_custom.ToTensor())
    # split the dataset in train and test set
    # The same shuffled indices are used for all three dataset views, so the
    # held-out last 50 samples never appear in either augmented train subset.
    indices = torch.randperm(len(dataset_1)).tolist()
    dataset_train_aug_1 = Subset(dataset_1, indices[:-50])
    dataset_train_aug_2 = Subset(dataset_2, indices[:-50])
    dataset_train = ConcatDataset([dataset_train_aug_1, dataset_train_aug_2])
    dataset_test = Subset(dataset_test, indices[-50:])
    # define training and validation data loaders
    data_loader = DataLoader(dataset_train, batch_size=2, shuffle=True, num_workers=0, collate_fn=collate_fn)
    data_loader_test = DataLoader(dataset_test, batch_size=1, shuffle=False, num_workers=0, collate_fn=collate_fn)
    dataloaders = {'train': data_loader, 'val': data_loader_test}
    # Two classes: background and pedestrian.
    model = get_mask_r_cnn(num_classes=2)
    # move model to the right device
    model.to(device)
    # construct an optimizer
    params = [p for p in model.parameters() if p.requires_grad]
    optimizer = torch.optim.SGD(params, lr=config.get('lr', 0.005), momentum=0.9, weight_decay=0.0005)
    # and a learning rate scheduler
    lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=3, gamma=0.1)
    trainer = MaskRCNNTrainer(dataloaders=dataloaders, root=ROOT_DIR, model=model, criterion=None,
                              optimizer=optimizer, scheduler=lr_scheduler, metrics={}, epochs=config.get('epochs', 10),
                              save_dir=args.save_dir, checkpoint=args.checkpoint, change_lr=args.change_lr)
    trainer.train()
if __name__ == '__main__':
    # Configure logging first, then let main_run load DEFAULT_CONFIG, parse
    # CLI arguments, and invoke main(config, args).
    default_log_config()
    main_run(main, DEFAULT_CONFIG)
|
// NOTE(review): browserify-generated bundle (entry module 1 -> ./src/primitives).
// Do not hand-edit: change the src/ modules named in the module map at the
// bottom and rebuild the bundle instead.
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
require('./src/primitives').registerAll();
},{"./src/primitives":5}],2:[function(require,module,exports){
/**
 * Flat grid.
 *
 * Defaults to 75x75.
 */
module.exports = {
  defaultComponents: {
    geometry: {
      primitive: 'plane',
      width: 75,
      height: 75
    },
    rotation: {x: -90, y: 0, z: 0},
    material: {
      src: 'url(https://cdn.rawgit.com/donmccurdy/aframe-extras/v1.16.3/assets/grid.png)',
      repeat: '75 75'
    }
  },
  mappings: {
    width: 'geometry.width',
    depth: 'geometry.depth',
    src: 'material.src'
  }
};
},{}],3:[function(require,module,exports){
/**
 * Flat-shaded ocean primitive.
 *
 * Based on a Codrops tutorial:
 * http://tympanus.net/codrops/2016/04/26/the-aviator-animating-basic-3d-scene-threejs/
 */
module.exports.Primitive = {
  defaultComponents: {
    ocean: {},
    rotation: {x: -90, y: 0, z: 0}
  },
  mappings: {
    width: 'ocean.width',
    depth: 'ocean.depth',
    density: 'ocean.density',
    color: 'ocean.color',
    opacity: 'ocean.opacity'
  }
};
module.exports.Component = {
  schema: {
    // Dimensions of the ocean area.
    width: {default: 10, min: 0},
    depth: {default: 10, min: 0},
    // Density of waves.
    density: {default: 10},
    // Wave amplitude and variance.
    amplitude: {default: 0.1},
    amplitudeVariance: {default: 0.3},
    // Wave speed and variance.
    speed: {default: 1},
    speedVariance: {default: 2},
    // Material.
    color: {default: 0x7AD2F7},
    opacity: {default: 0.8}
  },
  /**
   * Use play() instead of init(), because component mappings – unavailable as dependencies – are
   * not guaranteed to have parsed when this component is initialized.
   */
  play: function () {
    var el = this.el,
        data = this.data,
        material = el.components.material;
    var geometry = new THREE.PlaneGeometry(data.width, data.depth, data.density, data.density);
    geometry.mergeVertices();
    this.waves = [];
    for (var v, i = 0, l = geometry.vertices.length; i < l; i++) {
      v = geometry.vertices[i];
      this.waves.push({
        z: v.z,
        ang: Math.random() * Math.PI * 2,
        amp: data.amplitude + Math.random() * data.amplitudeVariance,
        speed: (data.speed + Math.random() * data.speedVariance) / 1000 // radians / frame
      });
    }
    if (!material) {
      material = {};
      material.material = new THREE.MeshPhongMaterial({
        color: data.color,
        transparent: data.opacity < 1,
        opacity: data.opacity,
        shading: THREE.FlatShading,
      });
    }
    this.mesh = new THREE.Mesh(geometry, material.material);
    el.object3D.add(this.mesh);
  },
  remove: function () {
    this.el.object3D.remove(this.mesh);
  },
  tick: function (t, dt) {
    if (!dt) return;
    var verts = this.mesh.geometry.vertices;
    for (var v, vprops, i = 0; (v = verts[i]); i++){
      vprops = this.waves[i];
      v.z = vprops.z + Math.sin(vprops.ang) * vprops.amp;
      vprops.ang += vprops.speed * dt;
    }
    this.mesh.geometry.verticesNeedUpdate = true;
  }
};
},{}],4:[function(require,module,exports){
/**
 * Tube following a custom path.
 *
 * Usage:
 *
 * ```html
 * <a-tube path="5 0 5, 5 0 -5, -5 0 -5" radius="0.5"></a-tube>
 * ```
 */
module.exports.Primitive = {
  defaultComponents: {
    tube: {},
  },
  mappings: {
    path: 'tube.path',
    segments: 'tube.segments',
    radius: 'tube.radius',
    radialSegments: 'tube.radialSegments',
    closed: 'tube.closed'
  }
};
module.exports.Component = {
  schema: {
    path: {default: []},
    segments: {default: 64},
    radius: {default: 1},
    radialSegments: {default: 8},
    closed: {default: false}
  },
  init: function () {
    var el = this.el,
        data = this.data,
        material = el.components.material;
    if (!data.path.length) {
      console.error('[a-tube] `path` property expected but not found.');
      return;
    }
    var curve = new THREE.CatmullRomCurve3(data.path.map(function (point) {
      point = point.split(' ');
      return new THREE.Vector3(Number(point[0]), Number(point[1]), Number(point[2]));
    }));
    var geometry = new THREE.TubeGeometry(
      curve, data.segments, data.radius, data.radialSegments, data.closed
    );
    if (!material) {
      material = {};
      material.material = new THREE.MeshPhongMaterial();
    }
    this.mesh = new THREE.Mesh(geometry, material.material);
    this.el.setObject3D('mesh', this.mesh);
  },
  remove: function () {
    if (this.mesh) this.el.removeObject3D('mesh');
  }
};
},{}],5:[function(require,module,exports){
module.exports = {
  'a-grid': require('./a-grid'),
  'a-ocean': require('./a-ocean'),
  'a-tube': require('./a-tube'),
  registerAll: function (AFRAME) {
    if (this._registered) return;
    AFRAME = AFRAME || window.AFRAME;
    AFRAME = AFRAME.aframeCore || AFRAME;
    AFRAME.registerPrimitive('a-grid', this['a-grid']);
    AFRAME.registerComponent('ocean', this['a-ocean'].Component);
    AFRAME.registerPrimitive('a-ocean', this['a-ocean'].Primitive);
    AFRAME.registerComponent('tube', this['a-tube'].Component);
    AFRAME.registerPrimitive('a-tube', this['a-tube'].Primitive);
    this._registered = true;
  }
};
},{"./a-grid":2,"./a-ocean":3,"./a-tube":4}]},{},[1]);
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import pytest
from airflow.www_rbac import app as application
from tests.compat import mock
class TestPluginsRBAC(object):
    """Tests that plugin-provided appbuilder views, menu links, and Flask
    blueprints are wired into the RBAC webserver application, and that broken
    entrypoint plugins are reported without raising.
    """
    def setup_method(self, method):
        # Fresh app + appbuilder per test so registrations cannot leak
        # between test methods.
        self.app, self.appbuilder = application.create_app(testing=True)
    def test_flaskappbuilder_views(self):
        """The plugin's appbuilder view is registered exactly once, with a
        menu entry under the plugin's declared category."""
        from tests.plugins.test_plugin import v_appbuilder_package
        appbuilder_class_name = str(v_appbuilder_package['view'].__class__.__name__)
        plugin_views = [view for view in self.appbuilder.baseviews
                        if view.blueprint.name == appbuilder_class_name]
        assert len(plugin_views) == 1
        # view should have a menu item matching category of v_appbuilder_package
        links = [menu_item for menu_item in self.appbuilder.menu.menu
                 if menu_item.name == v_appbuilder_package['category']]
        assert len(links) == 1
        # menu link should also have a link matching the name of the package.
        link = links[0]
        assert link.name == v_appbuilder_package['category']
        assert link.childs[0].name == v_appbuilder_package['name']
    def test_flaskappbuilder_menu_links(self):
        """The plugin's standalone menu link appears under its category."""
        from tests.plugins.test_plugin import appbuilder_mitem
        # menu item should exist matching appbuilder_mitem
        links = [menu_item for menu_item in self.appbuilder.menu.menu
                 if menu_item.name == appbuilder_mitem['category']]
        assert len(links) == 1
        # menu link should also have a link matching the name of the package.
        link = links[0]
        assert link.name == appbuilder_mitem['category']
        assert link.childs[0].name == appbuilder_mitem['name']
    def test_app_blueprints(self):
        """The plugin's Flask blueprint is registered on the app."""
        from tests.plugins.test_plugin import bp
        # Blueprint should be present in the app
        assert 'test_plugin' in self.app.blueprints
        assert self.app.blueprints['test_plugin'].name == bp.name
    @pytest.mark.quarantined
    def test_entrypoint_plugin_errors_dont_raise_exceptions(self, caplog):
        """
        Test that Airflow does not raise an Error if there is any Exception because of the
        Plugin.
        """
        from airflow.plugins_manager import import_errors, load_entrypoint_plugins, entry_points_with_dist
        # Fake a distribution whose entrypoint fails to import, then assert
        # the failure is logged (with traceback) and recorded in
        # import_errors rather than propagated.
        mock_dist = mock.Mock()
        mock_entrypoint = mock.Mock()
        mock_entrypoint.name = 'test-entrypoint'
        mock_entrypoint.group = 'airflow.plugins'
        mock_entrypoint.module = 'test.plugins.test_plugins_manager'
        mock_entrypoint.load.side_effect = ImportError('my_fake_module not found')
        mock_dist.entry_points = [mock_entrypoint]
        with mock.patch('importlib_metadata.distributions', return_value=[mock_dist]), caplog.at_level(
            logging.ERROR, logger='airflow.plugins_manager'
        ):
            load_entrypoint_plugins(entry_points_with_dist('airflow.plugins'), [])
            received_logs = caplog.text
            # Assert Traceback is shown too
            assert "Traceback (most recent call last):" in received_logs
            assert "my_fake_module not found" in received_logs
            assert "Failed to import plugin test-entrypoint" in received_logs
            assert ("test.plugins.test_plugins_manager", "my_fake_module not found") in import_errors.items()
|
// Copies the arco-design-pro-next template into an example project directory.
// CLI flags: --simple swaps in the trimmed template's src/, --projectPath
// overrides the destination directory.
const path = require('path');
const fs = require('fs-extra');
const minimist = require('minimist');

const params = minimist(process.argv.slice(2));
const isSimple = params.simple;
const simplePath = path.resolve(
  __dirname,
  '../simple-pro-template/arco-design-pro-next'
);
const templatePath = path.resolve(__dirname, '../arco-design-pro-next');
const projectPath =
  params.projectPath ||
  path.resolve(
    __dirname,
    '../examples/arco-design-pro-next' + `${isSimple ? '-simple' : ''}`
  );

// Copy the template, skipping node_modules and any .next build output.
fs.copySync(templatePath, projectPath, {
  filter: (src) =>
    !src.startsWith(path.resolve(templatePath, 'node_modules')) &&
    src.indexOf('.next') === -1,
});

if (isSimple) {
  // Replace src/ wholesale with the simplified template's sources.
  fs.emptyDirSync(path.resolve(projectPath, 'src'));
  fs.copySync(
    path.resolve(simplePath, 'src'),
    path.resolve(projectPath, 'src')
  );
}
// next cannot be loaded on demand and needs to be imported in full
/**
 * Prepend the Arco theme import to the project's global stylesheet so the
 * whole theme is bundled. No-op when the stylesheet does not exist.
 */
function addGlobalStyle() {
  const themeStyleCode = "@import '@arco-themes/react-arco-pro/index.less';\n";
  const globalStylePath = path.resolve(projectPath, 'src/style/global.less');
  if (!fs.existsSync(globalStylePath)) {
    return;
  }
  // BUG FIX: the original mixed async fs.readFile with a sync write and
  // silently swallowed read errors, so the script could exit before the
  // write ran. Read synchronously so errors surface and ordering is fixed.
  const data = fs.readFileSync(globalStylePath, 'utf-8');
  fs.writeFileSync(globalStylePath, themeStyleCode + data);
}
addGlobalStyle();
|
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import textwrap
import time
import fixtures
from lxml import etree
from oslo_log import log as logging
from oslo_utils.fixture import uuidsentinel as uuids
from nova.objects import fields as obj_fields
from nova.virt.libvirt import config as vconfig
# Allow passing None to the various connect methods
# (i.e. allow the client to rely on default URLs)
allow_default_uri_connection = True
# Has libvirt connection been used at least once
connection_used = False
def _reset():
    """Restore the module-level connection toggles to their defaults."""
    global allow_default_uri_connection
    allow_default_uri_connection = True
LOG = logging.getLogger(__name__)
# virDomainState
VIR_DOMAIN_NOSTATE = 0
VIR_DOMAIN_RUNNING = 1
VIR_DOMAIN_BLOCKED = 2
VIR_DOMAIN_PAUSED = 3
VIR_DOMAIN_SHUTDOWN = 4
VIR_DOMAIN_SHUTOFF = 5
VIR_DOMAIN_CRASHED = 6
# NOTE(mriedem): These values come from include/libvirt/libvirt-domain.h
VIR_DOMAIN_XML_SECURE = 1
VIR_DOMAIN_XML_INACTIVE = 2
VIR_DOMAIN_XML_UPDATE_CPU = 4
VIR_DOMAIN_XML_MIGRATABLE = 8
# virDomainBlockRebase flags
VIR_DOMAIN_BLOCK_REBASE_SHALLOW = 1
VIR_DOMAIN_BLOCK_REBASE_REUSE_EXT = 2
VIR_DOMAIN_BLOCK_REBASE_COPY = 8
VIR_DOMAIN_BLOCK_REBASE_COPY_DEV = 32
# virDomainBlockJobAbort flags
VIR_DOMAIN_BLOCK_JOB_ABORT_ASYNC = 1
VIR_DOMAIN_BLOCK_JOB_ABORT_PIVOT = 2
VIR_DOMAIN_EVENT_ID_LIFECYCLE = 0
# virDomainEventType (the 'event' argument of lifecycle callbacks)
VIR_DOMAIN_EVENT_DEFINED = 0
VIR_DOMAIN_EVENT_UNDEFINED = 1
VIR_DOMAIN_EVENT_STARTED = 2
VIR_DOMAIN_EVENT_SUSPENDED = 3
VIR_DOMAIN_EVENT_RESUMED = 4
VIR_DOMAIN_EVENT_STOPPED = 5
VIR_DOMAIN_EVENT_SHUTDOWN = 6
VIR_DOMAIN_EVENT_PMSUSPENDED = 7
# NOTE(review): 7 duplicates PMSUSPENDED above; in libvirt, POSTCOPY is a
# *detail* code of the SUSPENDED event — presumably intentional here, confirm.
VIR_DOMAIN_EVENT_SUSPENDED_POSTCOPY = 7
# virDomainUndefineFlags
VIR_DOMAIN_UNDEFINE_MANAGED_SAVE = 1
VIR_DOMAIN_UNDEFINE_NVRAM = 4
# virDomainModificationImpact
VIR_DOMAIN_AFFECT_CURRENT = 0
VIR_DOMAIN_AFFECT_LIVE = 1
VIR_DOMAIN_AFFECT_CONFIG = 2
# virCPUCompareResult
VIR_CPU_COMPARE_ERROR = -1
VIR_CPU_COMPARE_INCOMPATIBLE = 0
VIR_CPU_COMPARE_IDENTICAL = 1
VIR_CPU_COMPARE_SUPERSET = 2
# virConnectCredentialType
VIR_CRED_USERNAME = 1
VIR_CRED_AUTHNAME = 2
VIR_CRED_LANGUAGE = 3
VIR_CRED_CNONCE = 4
VIR_CRED_PASSPHRASE = 5
VIR_CRED_ECHOPROMPT = 6
VIR_CRED_NOECHOPROMPT = 7
VIR_CRED_REALM = 8
VIR_CRED_EXTERNAL = 9
# virDomainMigrateFlags
VIR_MIGRATE_LIVE = 1
VIR_MIGRATE_PEER2PEER = 2
VIR_MIGRATE_TUNNELLED = 4
VIR_MIGRATE_PERSIST_DEST = 8
VIR_MIGRATE_UNDEFINE_SOURCE = 16
VIR_MIGRATE_NON_SHARED_INC = 128
VIR_MIGRATE_AUTO_CONVERGE = 8192
VIR_MIGRATE_POSTCOPY = 32768
VIR_MIGRATE_TLS = 65536
VIR_NODE_CPU_STATS_ALL_CPUS = -1
VIR_DOMAIN_START_PAUSED = 1
# libvirtError enums
# (Intentionally different from what's in libvirt. We do this to check,
# that consumers of the library are using the symbolic names rather than
# hardcoding the numerical values)
VIR_FROM_QEMU = 100
VIR_FROM_DOMAIN = 200
VIR_FROM_NWFILTER = 330
VIR_FROM_REMOTE = 340
VIR_FROM_RPC = 345
VIR_FROM_NODEDEV = 666
VIR_ERR_INVALID_ARG = 8
VIR_ERR_NO_SUPPORT = 3
VIR_ERR_XML_ERROR = 27
VIR_ERR_XML_DETAIL = 350
VIR_ERR_NO_DOMAIN = 420
VIR_ERR_OPERATION_FAILED = 510
VIR_ERR_OPERATION_INVALID = 55
VIR_ERR_OPERATION_TIMEOUT = 68
VIR_ERR_NO_NWFILTER = 620
VIR_ERR_SYSTEM_ERROR = 900
VIR_ERR_INTERNAL_ERROR = 950
VIR_ERR_CONFIG_UNSUPPORTED = 951
VIR_ERR_NO_NODE_DEVICE = 667
VIR_ERR_NO_SECRET = 66
VIR_ERR_AGENT_UNRESPONSIVE = 86
VIR_ERR_ARGUMENT_UNSUPPORTED = 74
VIR_ERR_OPERATION_UNSUPPORTED = 84
# Readonly
VIR_CONNECT_RO = 1
# virConnectBaselineCPU flags
VIR_CONNECT_BASELINE_CPU_EXPAND_FEATURES = 1
# snapshotCreateXML flags
VIR_DOMAIN_SNAPSHOT_CREATE_NO_METADATA = 4
VIR_DOMAIN_SNAPSHOT_CREATE_DISK_ONLY = 16
VIR_DOMAIN_SNAPSHOT_CREATE_REUSE_EXT = 32
VIR_DOMAIN_SNAPSHOT_CREATE_QUIESCE = 64
# blockCommit flags
VIR_DOMAIN_BLOCK_COMMIT_RELATIVE = 4
# blockRebase flags
VIR_DOMAIN_BLOCK_REBASE_RELATIVE = 8
# listAllDomains flags
VIR_CONNECT_LIST_DOMAINS_ACTIVE = 1
VIR_CONNECT_LIST_DOMAINS_INACTIVE = 2
# secret type
VIR_SECRET_USAGE_TYPE_NONE = 0
VIR_SECRET_USAGE_TYPE_VOLUME = 1
VIR_SECRET_USAGE_TYPE_CEPH = 2
VIR_SECRET_USAGE_TYPE_ISCSI = 3
# Libvirt version to match MIN_LIBVIRT_VERSION in driver.py
FAKE_LIBVIRT_VERSION = 3000000
# Libvirt version to match MIN_QEMU_VERSION in driver.py
FAKE_QEMU_VERSION = 2008000
# Fake PCI/SR-IOV hardware identities used by FakePCIDevice below
PCI_VEND_ID = '8086'
PCI_VEND_NAME = 'Intel Corporation'
PCI_PROD_ID = '1533'
PCI_PROD_NAME = 'I210 Gigabit Network Connection'
PCI_DRIVER_NAME = 'igb'
PF_PROD_ID = '1528'
PF_PROD_NAME = 'Ethernet Controller 10-Gigabit X540-AT2'
PF_DRIVER_NAME = 'ixgbe'
PF_CAP_TYPE = 'virt_functions'
VF_PROD_ID = '1515'
VF_PROD_NAME = 'X540 Ethernet Controller Virtual Function'
VF_DRIVER_NAME = 'ixgbevf'
VF_CAP_TYPE = 'phys_function'
# Fake mediated-device (vGPU) identities used by FakeMdevDevice below
NVIDIA_11_VGPU_TYPE = 'nvidia-11'
PGPU1_PCI_ADDR = 'pci_0000_06_00_0'
PGPU2_PCI_ADDR = 'pci_0000_07_00_0'
PGPU3_PCI_ADDR = 'pci_0000_08_00_0'
class FakePCIDevice(object):
    """Generate a fake PCI device.

    Generate a fake PCI devices corresponding to one of the following
    real-world PCI devices.

    - I210 Gigabit Network Connection (8086:1533)
    - Ethernet Controller 10-Gigabit X540-AT2 (8086:1528)
    - X540 Ethernet Controller Virtual Function (8086:1515)
    """

    pci_device_template = textwrap.dedent("""
        <device>
          <name>pci_0000_81_%(slot)02x_%(function)d</name>
          <path>/sys/devices/pci0000:80/0000:80:01.0/0000:81:%(slot)02x.%(function)d</path>
          <parent>pci_0000_80_01_0</parent>
          <driver>
            <name>%(driver)s</name>
          </driver>
          <capability type='pci'>
            <domain>0</domain>
            <bus>129</bus>
            <slot>%(slot)d</slot>
            <function>%(function)d</function>
            <product id='0x%(prod_id)s'>%(prod_name)s</product>
            <vendor id='0x%(vend_id)s'>%(vend_name)s</vendor>
            %(capability)s
            <iommuGroup number='%(iommu_group)d'>
              <address domain='0x0000' bus='0x81' slot='%(slot)#02x' function='0x%(function)d'/>
            </iommuGroup>
            <numa node='%(numa_node)s'/>
            <pci-express>
              <link validity='cap' port='0' speed='5' width='8'/>
              <link validity='sta' speed='5' width='8'/>
            </pci-express>
          </capability>
        </device>""".strip())  # noqa
    cap_templ = "<capability type='%(cap_type)s'>%(addresses)s</capability>"
    addr_templ = "<address domain='0x0000' bus='0x81' slot='%(slot)#02x' function='%(function)#02x'/>"  # noqa

    def __init__(self, dev_type, slot, function, iommu_group, numa_node,
                 vf_ratio=None):
        """Populate pci devices

        :param dev_type: (string) Indicates the type of the device (PCI, PF,
            VF).
        :param slot: (int) Slot number of the device.
        :param function: (int) Function number of the device.
        :param iommu_group: (int) IOMMU group ID.
        :param numa_node: (int) NUMA node of the device.
        :param vf_ratio: (int) Ratio of Virtual Functions on Physical. Only
            applicable if ``dev_type`` is one of: ``PF``, ``VF``.
        :raises ValueError: if ``dev_type`` is unrecognized, or if
            ``vf_ratio`` is given for a plain ``PCI`` device.
        """
        if dev_type == 'PCI':
            if vf_ratio:
                raise ValueError('vf_ratio does not apply for PCI devices')
            prod_id = PCI_PROD_ID
            prod_name = PCI_PROD_NAME
            driver = PCI_DRIVER_NAME
            capability = ''
        elif dev_type == 'PF':
            prod_id = PF_PROD_ID
            prod_name = PF_PROD_NAME
            driver = PF_DRIVER_NAME
            # a PF advertises its child VFs via the 'virt_functions' cap
            capability = self.cap_templ % {
                'cap_type': PF_CAP_TYPE,
                'addresses': '\n'.join([
                    self.addr_templ % {
                        # these are the slot, function values of the child VFs
                        # we can only assign 8 functions to a slot (0-7) so
                        # bump the slot each time we exceed this
                        'slot': slot + (x // 8),
                        # ...and wrap the function value
                        'function': x % 8,
                        # the offset is because the PF is occupying function 0
                    } for x in range(1, vf_ratio + 1)])
            }
        elif dev_type == 'VF':
            prod_id = VF_PROD_ID
            prod_name = VF_PROD_NAME
            driver = VF_DRIVER_NAME
            # a VF points back at its parent PF via the 'phys_function' cap
            capability = self.cap_templ % {
                'cap_type': VF_CAP_TYPE,
                'addresses': self.addr_templ % {
                    # this is the slot, function value of the parent PF
                    # if we're e.g. device 8, we'll have a different slot
                    # to our parent so reverse this
                    'slot': slot - ((vf_ratio + 1) // 8),
                    # the parent PF is always function 0
                    'function': 0,
                }
            }
        else:
            # Fix: the message previously read 'PCI, VF, PCI', omitting PF.
            raise ValueError('Expected one of: PCI, PF, VF')

        self.pci_device = self.pci_device_template % {
            'slot': slot,
            'function': function,
            'vend_id': PCI_VEND_ID,
            'vend_name': PCI_VEND_NAME,
            'prod_id': prod_id,
            'prod_name': prod_name,
            'driver': driver,
            'capability': capability,
            'iommu_group': iommu_group,
            'numa_node': numa_node,
        }

    def XMLDesc(self, flags):
        # flags are accepted for API compatibility but ignored by the fake
        return self.pci_device
class HostPCIDevicesInfo(object):
    """Represent a pool of host PCI devices."""

    TOTAL_NUMA_NODES = 2
    pci_devname_template = 'pci_0000_81_%(slot)02x_%(function)d'

    def __init__(self, num_pci=0, num_pfs=2, num_vfs=8, numa_node=None):
        """Create a new HostPCIDevicesInfo object.

        :param num_pci: (int) The number of (non-SR-IOV) PCI devices.
        :param num_pfs: (int) The number of PCI SR-IOV Physical Functions.
        :param num_vfs: (int) The number of PCI SR-IOV Virtual Functions;
            must be an exact multiple of ``num_pfs``.
        :param numa_node: (int) NUMA node of the device; if set all of the
            devices will be assigned to the specified node else they will be
            split between ``$TOTAL_NUMA_NODES`` nodes.
        :raises ValueError: on an invalid num_pfs/num_vfs combination.
        """
        self.devices = {}

        if not (num_vfs or num_pfs):
            return

        if num_vfs and not num_pfs:
            raise ValueError('Cannot create VFs without PFs')

        if num_vfs % num_pfs:
            # Fix: the VFs are distributed evenly over the PFs, so num_vfs
            # must be a *multiple* of num_pfs (the old message said 'factor').
            raise ValueError('num_vfs must be a multiple of num_pfs')

        slot = 0
        function = 0
        iommu_group = 40  # totally arbitrary number

        # Generate PCI devs
        for dev in range(num_pci):
            pci_dev_name = self.pci_devname_template % {
                'slot': slot, 'function': function}
            LOG.info('Generating PCI device %r', pci_dev_name)
            self.devices[pci_dev_name] = FakePCIDevice(
                dev_type='PCI',
                slot=slot,
                function=function,
                iommu_group=iommu_group,
                numa_node=self._calc_numa_node(dev, numa_node))
            slot += 1
            iommu_group += 1

        vf_ratio = num_vfs // num_pfs if num_pfs else 0

        # Generate PFs
        for dev in range(num_pfs):
            function = 0
            numa_node_pf = self._calc_numa_node(dev, numa_node)
            pci_dev_name = self.pci_devname_template % {
                'slot': slot, 'function': function}
            LOG.info('Generating PF device %r', pci_dev_name)
            self.devices[pci_dev_name] = FakePCIDevice(
                dev_type='PF',
                slot=slot,
                function=function,
                iommu_group=iommu_group,
                numa_node=numa_node_pf,
                vf_ratio=vf_ratio)

            # Generate VFs belonging to this PF; they inherit its NUMA node
            for _ in range(vf_ratio):
                function += 1
                iommu_group += 1
                if function % 8 == 0:
                    # functions must be 0-7
                    slot += 1
                    function = 0
                pci_dev_name = self.pci_devname_template % {
                    'slot': slot, 'function': function}
                LOG.info('Generating VF device %r', pci_dev_name)
                self.devices[pci_dev_name] = FakePCIDevice(
                    dev_type='VF',
                    slot=slot,
                    function=function,
                    iommu_group=iommu_group,
                    numa_node=numa_node_pf,
                    vf_ratio=vf_ratio)
            slot += 1

    @classmethod
    def _calc_numa_node(cls, dev, numa_node):
        # round-robin over the host's nodes unless a node was forced
        return dev % cls.TOTAL_NUMA_NODES if numa_node is None else numa_node

    def get_all_devices(self):
        return self.devices.keys()

    def get_device_by_name(self, device_name):
        pci_dev = self.devices.get(device_name)
        return pci_dev
class FakeMdevDevice(object):
    """Fake mediated (mdev) node device, e.g. a vGPU instance."""

    template = """
    <device>
      <name>%(dev_name)s</name>
      <path>/sys/devices/pci0000:00/0000:00:02.0/%(path)s</path>
      <parent>%(parent)s</parent>
      <driver>
        <name>vfio_mdev</name>
      </driver>
      <capability type='mdev'>
        <type id='%(type_id)s'/>
        <iommuGroup number='12'/>
      </capability>
    </device>
    """

    def __init__(self, dev_name, type_id, parent):
        # the sysfs path component is the device name minus its 'mdev_' prefix
        fields = {
            'dev_name': dev_name,
            'type_id': type_id,
            'path': dev_name[len('mdev_'):],
            'parent': parent,
        }
        self.xml = self.template % fields

    def XMLDesc(self, flags):
        # flags accepted for API compatibility; the fake ignores them
        return self.xml
class HostMdevDevicesInfo(object):
    """A fixed pool of three fake mdev devices, one per fake pGPU."""

    def __init__(self):
        # same vGPU type for every device; only the parent pGPU differs
        parents = {
            'mdev_4b20d080_1b54_4048_85b3_a6a62d165c01': PGPU1_PCI_ADDR,
            'mdev_4b20d080_1b54_4048_85b3_a6a62d165c02': PGPU2_PCI_ADDR,
            'mdev_4b20d080_1b54_4048_85b3_a6a62d165c03': PGPU3_PCI_ADDR,
        }
        self.devices = {
            name: FakeMdevDevice(dev_name=name,
                                 type_id=NVIDIA_11_VGPU_TYPE,
                                 parent=parent)
            for name, parent in parents.items()
        }

    def get_all_devices(self):
        return self.devices.keys()

    def get_device_by_name(self, device_name):
        # unlike the PCI pool, an unknown name raises KeyError
        return self.devices[device_name]
class HostInfo(object):
    def __init__(self, arch=obj_fields.Architecture.X86_64, kB_mem=4096,
                 cpus=2, cpu_mhz=800, cpu_nodes=1,
                 cpu_sockets=1, cpu_cores=2,
                 cpu_threads=1, cpu_model="Penryn",
                 cpu_vendor="Intel", numa_topology='',
                 cpu_disabled=None):
        """Describe a fake hypervisor host.

        :param arch: (string) CPU architecture, as ``uname -m`` would
            report it (e.g. 'i686', 'x86_64')
        :param kB_mem: (int) memory size in KiB
        :param cpus: (int) number of active CPUs
        :param cpu_mhz: (int) expected CPU frequency
        :param cpu_nodes: (int) number of NUMA cells; 1 for uniform or
            unusual NUMA topologies
        :param cpu_sockets: (int) CPU sockets per node when nodes > 1,
            otherwise the total socket count
        :param cpu_cores: (int) cores per socket
        :param cpu_threads: (int) threads per core
        :param cpu_model: CPU model name
        :param cpu_vendor: CPU vendor name
        :param numa_topology: NUMA topology object (or '' for none)
        :param cpu_disabled: list of disabled cpu ids
        """
        # topology-related attributes first, then identity/meta attributes
        self.cpus = cpus
        self.cpu_mhz = cpu_mhz
        self.cpu_nodes = cpu_nodes
        self.cpu_sockets = cpu_sockets
        self.cpu_cores = cpu_cores
        self.cpu_threads = cpu_threads
        self.kB_mem = kB_mem
        self.arch = arch
        self.cpu_model = cpu_model
        self.cpu_vendor = cpu_vendor
        self.numa_topology = numa_topology
        self.disabled_cpus_list = cpu_disabled or []
class NUMAHostInfo(HostInfo):
    """A NUMA-by-default variant of HostInfo."""

    def __init__(self, **kwargs):
        super(NUMAHostInfo, self).__init__(**kwargs)
        if self.numa_topology:
            # caller supplied an explicit topology; nothing to synthesize
            return
        topology = NUMATopology(self.cpu_nodes, self.cpu_sockets,
                                self.cpu_cores, self.cpu_threads,
                                self.kB_mem)
        self.numa_topology = topology
        # keep the active cpu count consistent with the generated topology
        total = len(topology.cells) * len(topology.cells[0].cpus)
        self.cpus = total - len(self.disabled_cpus_list)
class NUMATopology(vconfig.LibvirtConfigCapsNUMATopology):
    """A batteries-included variant of LibvirtConfigCapsNUMATopology.

    Provides sane defaults for LibvirtConfigCapsNUMATopology that can be
    used in tests as is, or overridden where necessary.
    """

    def __init__(self, cpu_nodes=4, cpu_sockets=1, cpu_cores=1, cpu_threads=2,
                 kb_mem=1048576, mempages=None, **kwargs):
        super(NUMATopology, self).__init__(**kwargs)

        cpu_id = 0
        for node in range(cpu_nodes):
            cell = vconfig.LibvirtConfigCapsNUMACell()
            cell.id = node
            # memory is split evenly across the cells
            cell.memory = kb_mem // cpu_nodes
            for _socket in range(cpu_sockets):
                for index in range(cpu_cores * cpu_threads):
                    cpu = vconfig.LibvirtConfigCapsNUMACPU()
                    cpu.id = cpu_id
                    cpu.socket_id = node
                    cpu.core_id = index // cpu_threads
                    # hyperthread siblings share a core: cpu ids are grouped
                    # in runs of cpu_threads
                    base = cpu_threads * (cpu_id // cpu_threads)
                    cpu.siblings = set(base + t
                                       for t in range(cpu_threads))
                    cell.cpus.append(cpu)
                    cpu_id += 1

            # If no mempages are provided, use only the default 4K pages
            if mempages:
                cell.mempages = mempages[node]
            else:
                cell.mempages = create_mempages([(4, cell.memory // 4)])

            self.cells.append(cell)
def create_mempages(mappings):
    """Generate a list of LibvirtConfigCapsNUMAPages objects.

    :param mappings: (list of 2-tuples) (page_size, page_quantity)
        pairs, e.g. [(4, 1024), (2048, 16)].  (Note: the body unpacks
        2-tuples, so a plain dict would not work here.)
    :returns: [LibvirtConfigCapsNUMAPages, ...]
    """
    mempages = []
    for page_size, page_qty in mappings:
        mempage = vconfig.LibvirtConfigCapsNUMAPages()
        mempage.size = page_size
        mempage.total = page_qty
        mempages.append(mempage)
    return mempages
# virDomainJobType (returned by virDomainGetJobInfo)
VIR_DOMAIN_JOB_NONE = 0
VIR_DOMAIN_JOB_BOUNDED = 1
VIR_DOMAIN_JOB_UNBOUNDED = 2
VIR_DOMAIN_JOB_COMPLETED = 3
VIR_DOMAIN_JOB_FAILED = 4
VIR_DOMAIN_JOB_CANCELLED = 5
def _parse_disk_info(element):
disk_info = {}
disk_info['type'] = element.get('type', 'file')
disk_info['device'] = element.get('device', 'disk')
driver = element.find('./driver')
if driver is not None:
disk_info['driver_name'] = driver.get('name')
disk_info['driver_type'] = driver.get('type')
source = element.find('./source')
if source is not None:
disk_info['source'] = source.get('file')
if not disk_info['source']:
disk_info['source'] = source.get('dev')
if not disk_info['source']:
disk_info['source'] = source.get('path')
target = element.find('./target')
if target is not None:
disk_info['target_dev'] = target.get('dev')
disk_info['target_bus'] = target.get('bus')
return disk_info
def _parse_nic_info(element):
nic_info = {}
nic_info['type'] = element.get('type', 'bridge')
driver = element.find('./mac')
if driver is not None:
nic_info['mac'] = driver.get('address')
source = element.find('./source')
if source is not None:
nic_info['source'] = source.get('bridge')
target = element.find('./target')
if target is not None:
nic_info['target_dev'] = target.get('dev')
return nic_info
def disable_event_thread(self):
    """Disable nova libvirt driver event thread.

    The Nova libvirt driver starts a native thread that watches the
    libvirt event channel, calling sleep(1) for the whole life of a unit
    test.  That stray sleeper breaks tests that depend on specific sleep
    patterns (e.g. retry loops), so stub the thread setup out entirely.
    """
    # We are patching a method on a class, which MonkeyPatch does not
    # auto-import reliably; import the module explicitly so the patch
    # cannot silently miss its target.
    import nova.virt.libvirt.host  # noqa

    self.useFixture(fixtures.MockPatch(
        'nova.virt.libvirt.host.Host._init_events',
        side_effect=lambda *args, **kwargs: None))
class libvirtError(Exception):
    """Fake stand-in for libvirt-python's ``libvirtError``.

    This class was copied and slightly modified from
    `libvirt-python:libvirt-override.py`.  A test environment with the
    real `libvirt-python` installed uses its ``libvirtError`` instead of
    this fake, so the ``__init__`` signature and instance attributes must
    stay strictly compatible with the original.

    ``self.err`` is a tuple of the form::

        (error_code, error_domain, error_message, error_level, str1,
         str2, str3, int1, int2)

    Instances are normally built via the ``make_libvirtError`` convenience
    function, which fills in those fields in one call; a bare instance
    (``err is None``) answers ``None`` from every accessor.
    """

    def __init__(self, defmsg, conn=None, dom=None, net=None, pool=None,
                 vol=None):
        super(libvirtError, self).__init__(defmsg)
        self.err = None

    def _component(self, index):
        # shared None-guarded tuple lookup used by all accessors below
        if self.err is None:
            return None
        return self.err[index]

    def get_error_code(self):
        return self._component(0)

    def get_error_domain(self):
        return self._component(1)

    def get_error_message(self):
        return self._component(2)

    def get_error_level(self):
        return self._component(3)

    def get_str1(self):
        return self._component(4)

    def get_str2(self):
        return self._component(5)

    def get_str3(self):
        return self._component(6)

    def get_int1(self):
        return self._component(7)

    def get_int2(self):
        return self._component(8)
class NWFilter(object):
    """Fake libvirt network filter bound to its owning Connection."""

    def __init__(self, connection, xml):
        self._connection = connection
        self._xml = xml
        self._parse_xml(xml)

    def _parse_xml(self, xml):
        # only the filter's name is of interest to the fake
        root = etree.fromstring(xml).find('.')
        self._name = root.get('name')

    def undefine(self):
        self._connection._remove_filter(self)
class NodeDevice(object):
    """Fake libvirt node device (virNodeDevice)."""

    def __init__(self, connection, xml=None):
        self._connection = connection
        self._xml = xml
        if xml is not None:
            self._parse_xml(xml)

    def _parse_xml(self, xml):
        root = etree.fromstring(xml).find('.')
        self._name = root.get('name')

    def attach(self):
        pass

    def dettach(self):
        # (sic) spelling mirrors the real libvirt-python method name
        pass

    def reset(self):
        pass
class Domain(object):
    """A fake libvirt domain driven by a parsed XML definition."""

    def __init__(self, connection, xml, running=False, transient=False):
        self._connection = connection
        if running:
            connection._mark_running(self)

        self._state = running and VIR_DOMAIN_RUNNING or VIR_DOMAIN_SHUTOFF
        self._transient = transient
        self._def = self._parse_definition(xml)
        self._has_saved_state = False
        self._snapshots = {}
        # NOTE(review): _mark_running() above increments the connection's
        # id counter, so a running domain stores the post-increment value.
        self._id = self._connection._id_counter

    def _parse_definition(self, xml):
        """Parse the domain XML into the nested dict backing this fake.

        :raises libvirtError: (via make_libvirtError) on unparsable XML.
        """
        try:
            tree = etree.fromstring(xml)
        except etree.ParseError:
            raise make_libvirtError(
                libvirtError, "Invalid XML.",
                error_code=VIR_ERR_XML_DETAIL,
                error_domain=VIR_FROM_DOMAIN)

        definition = {}

        name = tree.find('./name')
        if name is not None:
            definition['name'] = name.text

        uuid_elem = tree.find('./uuid')
        if uuid_elem is not None:
            definition['uuid'] = uuid_elem.text
        else:
            definition['uuid'] = uuids.fake

        vcpu = tree.find('./vcpu')
        if vcpu is not None:
            definition['vcpu'] = int(vcpu.text)

        memory = tree.find('./memory')
        if memory is not None:
            definition['memory'] = int(memory.text)

        os = {}
        os_type = tree.find('./os/type')
        if os_type is not None:
            os['type'] = os_type.text
            # fall back to the (fake) host architecture when not specified
            os['arch'] = os_type.get('arch', self._connection.host_info.arch)

        os_kernel = tree.find('./os/kernel')
        if os_kernel is not None:
            os['kernel'] = os_kernel.text

        os_initrd = tree.find('./os/initrd')
        if os_initrd is not None:
            os['initrd'] = os_initrd.text

        os_cmdline = tree.find('./os/cmdline')
        if os_cmdline is not None:
            os['cmdline'] = os_cmdline.text

        os_boot = tree.find('./os/boot')
        if os_boot is not None:
            os['boot_dev'] = os_boot.get('dev')

        definition['os'] = os

        features = {}
        acpi = tree.find('./features/acpi')
        if acpi is not None:
            features['acpi'] = True

        definition['features'] = features

        devices = {}
        device_nodes = tree.find('./devices')
        if device_nodes is not None:
            disks_info = []
            disks = device_nodes.findall('./disk')
            for disk in disks:
                disks_info += [_parse_disk_info(disk)]
            devices['disks'] = disks_info

            nics_info = []
            nics = device_nodes.findall('./interface')
            for nic in nics:
                nic_info = {}
                nic_info['type'] = nic.get('type')

                mac = nic.find('./mac')
                if mac is not None:
                    nic_info['mac'] = mac.get('address')

                source = nic.find('./source')
                if source is not None:
                    if nic_info['type'] == 'network':
                        nic_info['source'] = source.get('network')
                    elif nic_info['type'] == 'bridge':
                        nic_info['source'] = source.get('bridge')

                nics_info += [nic_info]

            devices['nics'] = nics_info

            hostdev_info = []
            hostdevs = device_nodes.findall('./hostdev')
            for hostdev in hostdevs:
                address = hostdev.find('./source/address')
                # NOTE(gibi): only handle mdevs as pci is complicated
                dev_type = hostdev.get('type')
                if dev_type == 'mdev':
                    hostdev_info.append({
                        'type': dev_type,
                        'model': hostdev.get('model'),
                        'address_uuid': address.get('uuid')
                    })
            devices['hostdevs'] = hostdev_info

        definition['devices'] = devices

        return definition

    def create(self):
        self.createWithFlags(0)

    def createWithFlags(self, flags):
        # FIXME: Not handling flags at the moment
        self._state = VIR_DOMAIN_RUNNING
        self._connection._mark_running(self)
        self._has_saved_state = False

    def isActive(self):
        return int(self._state == VIR_DOMAIN_RUNNING)

    def undefine(self):
        self._connection._undefine(self)

    def isPersistent(self):
        return True

    def undefineFlags(self, flags):
        self.undefine()
        if flags & VIR_DOMAIN_UNDEFINE_MANAGED_SAVE:
            if self.hasManagedSaveImage(0):
                self.managedSaveRemove()

    def destroy(self):
        self._state = VIR_DOMAIN_SHUTOFF
        self._connection._mark_not_running(self)

    def ID(self):
        return self._id

    def name(self):
        return self._def['name']

    def UUIDString(self):
        return self._def['uuid']

    def interfaceStats(self, device):
        # static rx/tx byte/packet/error/drop counters
        return [10000242400, 1234, 0, 2, 213412343233, 34214234, 23, 3]

    def blockStats(self, device):
        return [2, 10000242400, 234, 2343424234, 34]

    def setTime(self, time=None, flags=0):
        pass

    def suspend(self):
        self._state = VIR_DOMAIN_PAUSED

    def shutdown(self):
        self._state = VIR_DOMAIN_SHUTDOWN
        self._connection._mark_not_running(self)

    def reset(self, flags):
        # FIXME: Not handling flags at the moment
        self._state = VIR_DOMAIN_RUNNING
        self._connection._mark_running(self)

    def info(self):
        # [state, maxMem, memory, nrVirtCpu, cpuTime]
        return [self._state,
                int(self._def['memory']),
                int(self._def['memory']),
                self._def['vcpu'],
                123456789]

    def migrateToURI3(self, dconnuri, params, flags):
        raise make_libvirtError(
                libvirtError,
                "Migration always fails for fake libvirt!",
                error_code=VIR_ERR_INTERNAL_ERROR,
                error_domain=VIR_FROM_QEMU)

    def migrateSetMaxDowntime(self, downtime):
        pass

    def attachDevice(self, xml):
        result = False
        if xml.startswith("<disk"):
            disk_info = _parse_disk_info(etree.fromstring(xml))
            disk_info['_attached'] = True
            self._def['devices']['disks'] += [disk_info]
            result = True
        elif xml.startswith("<interface"):
            nic_info = _parse_nic_info(etree.fromstring(xml))
            nic_info['_attached'] = True
            self._def['devices']['nics'] += [nic_info]
            result = True

        return result

    def attachDeviceFlags(self, xml, flags):
        if (flags & VIR_DOMAIN_AFFECT_LIVE and
                self._state != VIR_DOMAIN_RUNNING):
            raise make_libvirtError(
                libvirtError,
                "AFFECT_LIVE only allowed for running domains!",
                error_code=VIR_ERR_INTERNAL_ERROR,
                error_domain=VIR_FROM_QEMU)
        self.attachDevice(xml)

    def detachDevice(self, xml):
        # attachDevice() tags attached disks with '_attached', so rebuild
        # the same dict shape to test membership
        disk_info = _parse_disk_info(etree.fromstring(xml))
        disk_info['_attached'] = True
        return disk_info in self._def['devices']['disks']

    def detachDeviceFlags(self, xml, flags):
        self.detachDevice(xml)

    def setUserPassword(self, user, password, flags=0):
        pass

    def XMLDesc(self, flags):
        """Render the parsed definition back into domain XML."""
        disks = ''
        for disk in self._def['devices']['disks']:
            if disk['type'] == 'file':
                source_attr = 'file'
            else:
                source_attr = 'dev'
            disks += '''<disk type='%(type)s' device='%(device)s'>
      <driver name='%(driver_name)s' type='%(driver_type)s'/>
      <source %(source_attr)s='%(source)s'/>
      <target dev='%(target_dev)s' bus='%(target_bus)s'/>
      <address type='drive' controller='0' bus='0' unit='0'/>
    </disk>''' % dict(source_attr=source_attr, **disk)

        nics = ''
        for nic in self._def['devices']['nics']:
            if 'source' in nic:
                nics += '''<interface type='%(type)s'>
      <mac address='%(mac)s'/>
      <source %(type)s='%(source)s'/>
      <target dev='tap274487d1-60'/>
      <address type='pci' domain='0x0000' bus='0x00' slot='0x03'
               function='0x0'/>
    </interface>''' % nic
            # this covers for direct nic type
            else:
                nics += '''<interface type='%(type)s'>
      <mac address='%(mac)s'/>
      <source>
        <address type='pci' domain='0x0000' bus='0x81' slot='0x00'
                 function='0x01'/>
      </source>
    </interface>''' % nic

        hostdevs = ''
        for hostdev in self._def['devices']['hostdevs']:
            hostdevs += '''<hostdev mode='subsystem' type='%(type)s' model='%(model)s'>
    <source>
      <address uuid='%(address_uuid)s'/>
    </source>
    </hostdev>
    ''' % hostdev  # noqa

        return '''<domain type='kvm'>
      <name>%(name)s</name>
      <uuid>%(uuid)s</uuid>
      <memory>%(memory)s</memory>
      <currentMemory>%(memory)s</currentMemory>
      <vcpu>%(vcpu)s</vcpu>
      <os>
        <type arch='%(arch)s' machine='pc-0.12'>hvm</type>
        <boot dev='hd'/>
      </os>
      <features>
        <acpi/>
        <apic/>
        <pae/>
      </features>
      <clock offset='localtime'/>
      <on_poweroff>destroy</on_poweroff>
      <on_reboot>restart</on_reboot>
      <on_crash>restart</on_crash>
      <devices>
        <emulator>/usr/bin/kvm</emulator>
        %(disks)s
        <controller type='ide' index='0'>
          <address type='pci' domain='0x0000' bus='0x00' slot='0x01'
                   function='0x1'/>
        </controller>
        %(nics)s
        <serial type='file'>
          <source path='dummy.log'/>
          <target port='0'/>
        </serial>
        <serial type='pty'>
          <source pty='/dev/pts/27'/>
          <target port='1'/>
        </serial>
        <serial type='tcp'>
          <source host="-1" service="-1" mode="bind"/>
        </serial>
        <console type='file'>
          <source path='dummy.log'/>
          <target port='0'/>
        </console>
        <input type='tablet' bus='usb'/>
        <input type='mouse' bus='ps2'/>
        <graphics type='vnc' port='-1' autoport='yes'/>
        <graphics type='spice' port='-1' autoport='yes'/>
        <video>
          <model type='cirrus' vram='9216' heads='1'/>
          <address type='pci' domain='0x0000' bus='0x00' slot='0x02'
                   function='0x0'/>
        </video>
        <memballoon model='virtio'>
          <address type='pci' domain='0x0000' bus='0x00' slot='0x04'
                   function='0x0'/>
        </memballoon>
        %(hostdevs)s
      </devices>
    </domain>''' % {'name': self._def['name'],
                    'uuid': self._def['uuid'],
                    'memory': self._def['memory'],
                    'vcpu': self._def['vcpu'],
                    'arch': self._def['os']['arch'],
                    'disks': disks,
                    'nics': nics,
                    'hostdevs': hostdevs}

    def managedSave(self, flags):
        self._connection._mark_not_running(self)
        self._has_saved_state = True

    def managedSaveRemove(self, flags):
        self._has_saved_state = False

    def hasManagedSaveImage(self, flags):
        return int(self._has_saved_state)

    def resume(self):
        self._state = VIR_DOMAIN_RUNNING

    def snapshotCreateXML(self, xml, flags):
        tree = etree.fromstring(xml)
        name = tree.find('./name').text
        snapshot = DomainSnapshot(name, self)
        self._snapshots[name] = snapshot
        return snapshot

    def vcpus(self):
        # ([(number, state, cpuTime, cpu), ...], [(cpu-affinity), ...])
        vcpus = ([], [])
        for i in range(0, self._def['vcpu']):
            vcpus[0].append((i, 1, 120405, i))
            vcpus[1].append((True, True, True, True))
        return vcpus

    def memoryStats(self):
        return {}

    def maxMemory(self):
        return self._def['memory']

    def blockJobInfo(self, disk, flags):
        return {}

    def blockJobAbort(self, disk, flags):
        pass

    def blockResize(self, disk, size):
        pass

    def blockRebase(self, disk, base, bandwidth=0, flags=0):
        # NOTE: flags is a bitmask, so the RELATIVE bit must be tested
        # with '&'.  The previous 'and' made ANY non-zero flags value
        # (e.g. VIR_DOMAIN_BLOCK_REBASE_COPY) trip this error when base
        # was None.
        if (not base) and (flags & VIR_DOMAIN_BLOCK_REBASE_RELATIVE):
            raise make_libvirtError(
                libvirtError,
                'flag VIR_DOMAIN_BLOCK_REBASE_RELATIVE is '
                'valid only with non-null base',
                error_code=VIR_ERR_INVALID_ARG,
                error_domain=VIR_FROM_QEMU)
        return 0

    def blockCommit(self, disk, base, top, flags):
        return 0

    def jobInfo(self):
        # NOTE(danms): This is an array of 12 integers, so just report
        # something to avoid an IndexError if we look at this
        return [0] * 12

    def jobStats(self, flags=0):
        return {}

    def injectNMI(self, flags=0):
        return 0

    def abortJob(self):
        pass

    def fsFreeze(self):
        pass

    def fsThaw(self):
        pass
class DomainSnapshot(object):
    """Fake snapshot registered in its parent Domain's snapshot table."""

    def __init__(self, name, domain):
        self._name = name
        self._domain = domain

    def delete(self, flags):
        # deleting simply unregisters the snapshot from the parent domain
        self._domain._snapshots.pop(self._name)
class Connection(object):
def __init__(self, uri=None, readonly=False, version=FAKE_LIBVIRT_VERSION,
hv_version=FAKE_QEMU_VERSION, host_info=None, pci_info=None,
mdev_info=None):
if not uri or uri == '':
if allow_default_uri_connection:
uri = 'qemu:///session'
else:
raise ValueError("URI was None, but fake libvirt is "
"configured to not accept this.")
uri_whitelist = ['qemu:///system',
'qemu:///session',
'lxc:///', # from LibvirtDriver._uri()
'xen:///', # from LibvirtDriver._uri()
'uml:///system',
'test:///default',
'parallels:///system']
if uri not in uri_whitelist:
raise make_libvirtError(
libvirtError,
"libvirt error: no connection driver "
"available for No connection for URI %s" % uri,
error_code=5, error_domain=0)
self.readonly = readonly
self._uri = uri
self._vms = {}
self._running_vms = {}
self._id_counter = 1 # libvirt reserves 0 for the hypervisor.
self._nwfilters = {}
self._nodedevs = {}
self._event_callbacks = {}
self.fakeLibVersion = version
self.fakeVersion = hv_version
self.host_info = host_info or HostInfo()
self.pci_info = pci_info or HostPCIDevicesInfo(num_pci=0,
num_pfs=0,
num_vfs=0)
self.mdev_info = mdev_info or []
def _add_filter(self, nwfilter):
self._nwfilters[nwfilter._name] = nwfilter
def _remove_filter(self, nwfilter):
del self._nwfilters[nwfilter._name]
def _add_nodedev(self, nodedev):
self._nodedevs[nodedev._name] = nodedev
def _remove_nodedev(self, nodedev):
del self._nodedevs[nodedev._name]
def _mark_running(self, dom):
self._running_vms[self._id_counter] = dom
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STARTED, 0)
self._id_counter += 1
def _mark_not_running(self, dom):
if dom._transient:
self._undefine(dom)
dom._id = -1
for (k, v) in self._running_vms.items():
if v == dom:
del self._running_vms[k]
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_STOPPED, 0)
return
def _undefine(self, dom):
del self._vms[dom.name()]
if not dom._transient:
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_UNDEFINED, 0)
def getInfo(self):
return [self.host_info.arch,
self.host_info.kB_mem,
self.host_info.cpus,
self.host_info.cpu_mhz,
self.host_info.cpu_nodes,
self.host_info.cpu_sockets,
self.host_info.cpu_cores,
self.host_info.cpu_threads]
def lookupByUUIDString(self, uuid):
for vm in self._vms.values():
if vm.UUIDString() == uuid:
return vm
raise make_libvirtError(
libvirtError,
'Domain not found: no domain with matching uuid "%s"' % uuid,
error_code=VIR_ERR_NO_DOMAIN,
error_domain=VIR_FROM_QEMU)
def listAllDomains(self, flags=None):
vms = []
for vm in self._vms.values():
if flags & VIR_CONNECT_LIST_DOMAINS_ACTIVE:
if vm._state != VIR_DOMAIN_SHUTOFF:
vms.append(vm)
if flags & VIR_CONNECT_LIST_DOMAINS_INACTIVE:
if vm._state == VIR_DOMAIN_SHUTOFF:
vms.append(vm)
return vms
def _emit_lifecycle(self, dom, event, detail):
if VIR_DOMAIN_EVENT_ID_LIFECYCLE not in self._event_callbacks:
return
cbinfo = self._event_callbacks[VIR_DOMAIN_EVENT_ID_LIFECYCLE]
callback = cbinfo[0]
opaque = cbinfo[1]
callback(self, dom, event, detail, opaque)
def defineXML(self, xml):
dom = Domain(connection=self, running=False, transient=False, xml=xml)
self._vms[dom.name()] = dom
self._emit_lifecycle(dom, VIR_DOMAIN_EVENT_DEFINED, 0)
return dom
def createXML(self, xml, flags):
    """Create and immediately start a transient domain from *xml*."""
    new_dom = Domain(connection=self, running=True, transient=True, xml=xml)
    self._vms[new_dom.name()] = new_dom
    self._emit_lifecycle(new_dom, VIR_DOMAIN_EVENT_STARTED, 0)
    return new_dom
def getType(self):
    """Return the hypervisor driver name ('QEMU' for the system URI,
    None otherwise — matching the original implicit behaviour)."""
    if self._uri == 'qemu:///system':
        return 'QEMU'
    return None
def getLibVersion(self):
    """Return the faked libvirt library version number."""
    return self.fakeLibVersion
def getVersion(self):
    """Return the faked hypervisor version number."""
    return self.fakeVersion
def getHostname(self):
    """Return a fixed fake hostname."""
    return 'compute1'
def domainEventRegisterAny(self, dom, eventid, callback, opaque):
    """Store (callback, opaque) for *eventid*; later lookups index [0]/[1],
    so a two-element list is kept."""
    registration = [callback, opaque]
    self._event_callbacks[eventid] = registration
def registerCloseCallback(self, cb, opaque):
    """No-op stand-in for virConnect.registerCloseCallback()."""
    return None
def getCPUMap(self):
    """Return calculated CPU map from HostInfo, by default showing 2
    online CPUs.

    Returns (total_cpus, per-cpu online bool list, online count).
    """
    disabled = self.host_info.disabled_cpus_list
    active_cpus = self.host_info.cpus
    total_cpus = active_cpus + len(disabled)
    cpu_map = [cpu not in disabled for cpu in range(total_cpus)]
    return (total_cpus, cpu_map, active_cpus)
def getCapabilities(self):
    """Return spoofed capabilities XML.

    The <host> CPU topology and NUMA topology are interpolated from
    ``self.host_info``; the <guest> sections are static.
    """
    numa_topology = self.host_info.numa_topology
    # A NUMA topology may be supplied as a config object; serialize it so
    # it can be interpolated into the XML template below.
    if isinstance(numa_topology, vconfig.LibvirtConfigCapsNUMATopology):
        numa_topology = numa_topology.to_xml()

    return '''<capabilities>
<host>
<uuid>cef19ce0-0ca2-11df-855d-b19fbce37686</uuid>
<cpu>
<arch>x86_64</arch>
<model>Penryn</model>
<vendor>Intel</vendor>
<topology sockets='%(sockets)s' cores='%(cores)s' threads='%(threads)s'/>
<feature name='xtpr'/>
<feature name='tm2'/>
<feature name='est'/>
<feature name='vmx'/>
<feature name='ds_cpl'/>
<feature name='monitor'/>
<feature name='pbe'/>
<feature name='tm'/>
<feature name='ht'/>
<feature name='ss'/>
<feature name='acpi'/>
<feature name='ds'/>
<feature name='vme'/>
</cpu>
<migration_features>
<live/>
<uri_transports>
<uri_transport>tcp</uri_transport>
</uri_transports>
</migration_features>
%(topology)s
<secmodel>
<model>apparmor</model>
<doi>0</doi>
</secmodel>
</host>
<guest>
<os_type>hvm</os_type>
<arch name='i686'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
<domain type='qemu'>
</domain>
<domain type='kvm'>
<emulator>/usr/bin/kvm</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
</domain>
</arch>
<features>
<cpuselection/>
<deviceboot/>
<pae/>
<nonpae/>
<acpi default='on' toggle='yes'/>
<apic default='on' toggle='no'/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='x86_64'>
<wordsize>64</wordsize>
<emulator>/usr/bin/qemu-system-x86_64</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
<domain type='qemu'>
</domain>
<domain type='kvm'>
<emulator>/usr/bin/kvm</emulator>
<machine>pc-0.14</machine>
<machine canonical='pc-0.14'>pc</machine>
<machine>pc-0.13</machine>
<machine>pc-0.12</machine>
<machine>pc-0.11</machine>
<machine>pc-0.10</machine>
<machine>isapc</machine>
</domain>
</arch>
<features>
<cpuselection/>
<deviceboot/>
<acpi default='on' toggle='yes'/>
<apic default='on' toggle='no'/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='armv7l'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-arm</emulator>
<machine>integratorcp</machine>
<machine>vexpress-a9</machine>
<machine>syborg</machine>
<machine>musicpal</machine>
<machine>mainstone</machine>
<machine>n800</machine>
<machine>n810</machine>
<machine>n900</machine>
<machine>cheetah</machine>
<machine>sx1</machine>
<machine>sx1-v1</machine>
<machine>beagle</machine>
<machine>beaglexm</machine>
<machine>tosa</machine>
<machine>akita</machine>
<machine>spitz</machine>
<machine>borzoi</machine>
<machine>terrier</machine>
<machine>connex</machine>
<machine>verdex</machine>
<machine>lm3s811evb</machine>
<machine>lm3s6965evb</machine>
<machine>realview-eb</machine>
<machine>realview-eb-mpcore</machine>
<machine>realview-pb-a8</machine>
<machine>realview-pbx-a9</machine>
<machine>versatilepb</machine>
<machine>versatileab</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='mips'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-mips</emulator>
<machine>malta</machine>
<machine>mipssim</machine>
<machine>magnum</machine>
<machine>pica61</machine>
<machine>mips</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='mipsel'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-mipsel</emulator>
<machine>malta</machine>
<machine>mipssim</machine>
<machine>magnum</machine>
<machine>pica61</machine>
<machine>mips</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='sparc'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-sparc</emulator>
<machine>SS-5</machine>
<machine>leon3_generic</machine>
<machine>SS-10</machine>
<machine>SS-600MP</machine>
<machine>SS-20</machine>
<machine>Voyager</machine>
<machine>LX</machine>
<machine>SS-4</machine>
<machine>SPARCClassic</machine>
<machine>SPARCbook</machine>
<machine>SS-1000</machine>
<machine>SS-2000</machine>
<machine>SS-2</machine>
<domain type='qemu'>
</domain>
</arch>
</guest>
<guest>
<os_type>hvm</os_type>
<arch name='ppc'>
<wordsize>32</wordsize>
<emulator>/usr/bin/qemu-system-ppc</emulator>
<machine>g3beige</machine>
<machine>virtex-ml507</machine>
<machine>mpc8544ds</machine>
<machine canonical='bamboo-0.13'>bamboo</machine>
<machine>bamboo-0.13</machine>
<machine>bamboo-0.12</machine>
<machine>ref405ep</machine>
<machine>taihu</machine>
<machine>mac99</machine>
<machine>prep</machine>
<domain type='qemu'>
</domain>
</arch>
<features>
<deviceboot/>
</features>
</guest>
</capabilities>''' % {'sockets': self.host_info.cpu_sockets,
                      'cores': self.host_info.cpu_cores,
                      'threads': self.host_info.cpu_threads,
                      'topology': numa_topology}
def compareCPU(self, xml, flags):
    """Crude CPU compatibility check against the fake host's CPU.

    Only arch, model and vendor are compared; any other difference is
    treated as identical.
    """
    tree = etree.fromstring(xml)

    arch_node = tree.find('./arch')
    if arch_node is not None and arch_node.text not in (
            obj_fields.Architecture.X86_64, obj_fields.Architecture.I686):
        return VIR_CPU_COMPARE_INCOMPATIBLE

    model_node = tree.find('./model')
    if model_node is not None and model_node.text != self.host_info.cpu_model:
        return VIR_CPU_COMPARE_INCOMPATIBLE

    vendor_node = tree.find('./vendor')
    if vendor_node is not None and vendor_node.text != self.host_info.cpu_vendor:
        return VIR_CPU_COMPARE_INCOMPATIBLE

    # The rest of the stuff libvirt implements is rather complicated
    # and I don't think it adds much value to replicate it here.
    return VIR_CPU_COMPARE_IDENTICAL
def getCPUStats(self, cpuNum, flag):
    """Return canned per-CPU time counters (ns) for CPUs 0 and 1; any
    other CPU number raises the standard invalid-argument error."""
    if cpuNum >= 2:
        raise make_libvirtError(
            libvirtError,
            "invalid argument: Invalid cpu number",
            error_code=VIR_ERR_INTERNAL_ERROR,
            error_domain=VIR_FROM_QEMU)
    return {'kernel': 5664160000000,
            'idle': 1592705190000000,
            'user': 26728850000000,
            'iowait': 6121490000000}
def nwfilterLookupByName(self, name):
    """Return the nwfilter registered under *name*, raising the libvirt
    no-nwfilter error when it is unknown."""
    if name not in self._nwfilters:
        raise make_libvirtError(
            libvirtError,
            "no nwfilter with matching name %s" % name,
            error_code=VIR_ERR_NO_NWFILTER,
            error_domain=VIR_FROM_NWFILTER)
    return self._nwfilters[name]
def nwfilterDefineXML(self, xml):
    """Create an NWFilter from *xml* and register it on this connection."""
    new_filter = NWFilter(self, xml)
    self._add_filter(new_filter)
def device_lookup_by_name(self, dev_name):
    """Look up a PCI device by name via the fake PCI inventory."""
    return self.pci_info.get_device_by_name(dev_name)
def nodeDeviceLookupByName(self, name):
    """Look up a node device: mediated devices first (names starting with
    'mdev'), then PCI devices, then explicitly registered nodedevs."""
    if name.startswith('mdev'):
        return self.mdev_info.get_device_by_name(name)

    pci_dev = self.pci_info.get_device_by_name(name)
    if pci_dev:
        return pci_dev

    if name not in self._nodedevs:
        raise make_libvirtError(
            libvirtError,
            "no nodedev with matching name %s" % name,
            error_code=VIR_ERR_NO_NODE_DEVICE,
            error_domain=VIR_FROM_NODEDEV)
    return self._nodedevs[name]
def listDevices(self, cap, flags):
    """List device names for capability *cap*: 'pci', 'mdev' or
    'mdev_types'; anything else is rejected."""
    if cap == 'pci':
        return self.pci_info.get_all_devices()
    if cap == 'mdev':
        return self.mdev_info.get_all_devices()
    if cap == 'mdev_types':
        # TODO(gibi): We should return something like
        # https://libvirt.org/drvnodedev.html#MDEVCap but I tried and it
        # did not work for me.
        return None
    raise ValueError('Capability "%s" is not supported' % cap)
def baselineCPU(self, cpu, flag):
    """Add new libvirt API.

    Returns a static baseline CPU definition; the *cpu* and *flag*
    arguments are ignored by this fake.
    """
    return """<cpu mode='custom' match='exact'>
<model>Penryn</model>
<vendor>Intel</vendor>
<feature name='xtpr'/>
<feature name='tm2'/>
<feature name='est'/>
<feature name='vmx'/>
<feature name='ds_cpl'/>
<feature name='monitor'/>
<feature name='pbe'/>
<feature name='tm'/>
<feature name='ht'/>
<feature name='ss'/>
<feature name='acpi'/>
<feature name='ds'/>
<feature name='vme'/>
<feature policy='require' name='aes'/>
</cpu>"""
def secretLookupByUsage(self, usage_type_obj, usage_id):
    """Stub: secret lookup is not implemented in this fake driver."""
    pass
def secretDefineXML(self, xml):
    """Stub: secret definition is not implemented in this fake driver."""
    pass
def openAuth(uri, auth, flags=0):
    """Fake of ``libvirt.openAuth()``.

    *auth* must be a 3-item list mirroring the libvirt API:
    ``[credential-type list, callback, opaque]``.

    BUG FIX: the error message for a malformed ``auth[0]`` claimed a
    function was expected, although the check is (and always was) for a
    list of credential types. The type checks also use ``isinstance`` so
    subclasses of ``list`` are accepted.
    """
    if not isinstance(auth, list):
        raise Exception("Expected a list for 'auth' parameter")

    if not isinstance(auth[0], list):
        raise Exception("Expected a list in 'auth[0]' parameter")

    if not callable(auth[1]):
        raise Exception("Expected a function in 'auth[1]' parameter")

    return Connection(uri, (flags == VIR_CONNECT_RO))
def virEventRunDefaultImpl():
    # Fake event-loop iteration: real libvirt would dispatch pending events;
    # here we just sleep so callers polling in a loop don't spin.
    time.sleep(1)
def virEventRegisterDefaultImpl():
    # Mirrors libvirt's requirement that the default event implementation
    # is registered before any connection is used.
    # NOTE(review): relies on the module-level `connection_used` flag,
    # which is defined elsewhere in this file.
    if connection_used:
        raise Exception("virEventRegisterDefaultImpl() must be "
                        "called before connection is used.")
def registerErrorHandler(handler, ctxt):
    """No-op stand-in for ``libvirt.registerErrorHandler()``."""
    pass
def make_libvirtError(error_class, msg, error_code=None,
                      error_domain=None, error_message=None,
                      error_level=None, str1=None, str2=None, str3=None,
                      int1=None, int2=None):
    """Convenience factory for `libvirtError` exceptions.

    Builds the exception and populates its ``err`` tuple in libvirt's
    layout so callers don't have to assemble the tuple by hand. We need to
    pass in *error_class* because it may be `libvirt.libvirtError` or
    `fakelibvirt.libvirtError` depending on whether `libvirt-python` is
    installed.
    """
    exc = error_class(msg)
    exc.err = (
        error_code,
        error_domain,
        error_message,
        error_level,
        str1,
        str2,
        str3,
        int1,
        int2,
    )
    return exc
# Aliases matching the public names exposed by the real libvirt module.
virDomain = Domain
virNodeDevice = NodeDevice
virConnect = Connection
class FakeLibvirtFixture(fixtures.Fixture):
    """Performs global setup/stubbing for all libvirt tests.
    """

    def __init__(self, stub_os_vif=True):
        # When true, VIF plug/unplug in os-vif is stubbed out in setUp().
        self.stub_os_vif = stub_os_vif

    def setUp(self):
        super(FakeLibvirtFixture, self).setUp()

        # Some modules load the libvirt library in a strange way
        for module in ('driver', 'host', 'guest', 'firewall', 'migration'):
            i = 'nova.virt.libvirt.{module}.libvirt'.format(module=module)
            # NOTE(mdbooth): The strange incantation below means 'this module'
            self.useFixture(fixtures.MonkeyPatch(i, sys.modules[__name__]))

        self.useFixture(
            fixtures.MockPatch('nova.virt.libvirt.utils.get_fs_info'))

        # libvirt driver needs to call out to the filesystem to get the
        # parent_ifname for the SRIOV VFs.
        self.useFixture(fixtures.MockPatch(
            'nova.pci.utils.get_ifname_by_pci_address',
            return_value='fake_pf_interface_name'))

        # Don't assume that the system running tests has a valid machine-id
        self.useFixture(fixtures.MockPatch(
            'nova.virt.libvirt.driver.LibvirtDriver'
            '._get_host_sysinfo_serial_os', return_value=uuids.machine_id))

        # NOTE(review): presumably stops the fake driver's event dispatch
        # thread — defined elsewhere in this file; confirm.
        disable_event_thread(self)

        if self.stub_os_vif:
            # Make sure to never try and actually plug/unplug VIFs in os-vif
            # unless we're explicitly testing that code and the test itself
            # will handle the appropriate mocking.
            self.useFixture(fixtures.MonkeyPatch(
                'nova.virt.libvirt.vif.LibvirtGenericVIFDriver._plug_os_vif',
                lambda *a, **kw: None))
            self.useFixture(fixtures.MonkeyPatch(
                'nova.virt.libvirt.vif.LibvirtGenericVIFDriver._unplug_os_vif',
                lambda *a, **kw: None))

        # os_vif.initialize is typically done in nova-compute startup
        # even if we are not planning to plug anything with os_vif in the test
        # we still need the object model initialized to be able to generate
        # guest config xml properly
        import os_vif
        os_vif.initialize()
|
import datetime

# Report the date 270 days after the project start date of 2020-08-17.
startDate = datetime.date(2020, 8, 17)
endDate = startDate + datetime.timedelta(days=270)
print(endDate)
|
# Copyright 2017 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from datetime import datetime
from .pod import Pod
class K8sInventory():
    """ Kubernetes inventory - deal with namespaces, deployments and pods.

    Also manages a short-lived cache of namespace names.
    """

    # Seconds a cached namespace listing is considered fresh.
    CACHE_TTL = 10

    def __init__(self, k8s_client, logger=None):
        self.k8s_client = k8s_client
        self._cache_namespaces = []
        self._cache_last = None
        self.logger = logger or logging.getLogger(__name__)
        self.last_pods = []

    def is_fresh(self, when):
        """ Return True if data cached at *when* is still fresh.

        BUG FIX: the original used ``delta.seconds``, which is only the
        seconds *component* of the timedelta (0-86399), so a days-old
        cache whose component happened to be small was wrongly reported
        fresh. ``total_seconds()`` is the full elapsed time.
        """
        delta = datetime.now() - when
        return delta.total_seconds() <= self.CACHE_TTL

    def find_namespaces(self):
        """ Returns all namespace names, served from cache when fresh.
        """
        if self._cache_last is not None and self.is_fresh(self._cache_last):
            self.logger.info("Using cached namespaces")
            return self._cache_namespaces

        self.logger.info("Reading kubernetes namespaces")
        namespaces = [
            item.metadata.name for item in self.k8s_client.list_namespaces()
        ]
        self._cache_namespaces = namespaces
        self._cache_last = datetime.now()
        return namespaces

    def find_deployments(self, namespace=None, labels=None):
        """ Find deployment names for a namespace (defaults to "default").
        """
        namespace = namespace or "default"
        return [
            item.metadata.name
            for item in self.k8s_client.list_deployments(
                namespace=namespace,
                labels=labels,
            )
        ]

    def delete_pod(self, namespace=None, name=None):
        """ Delete pod *name* from *namespace*.
        """
        self.k8s_client.delete_pod(
            namespace=namespace,
            name=name
        )

    def find_pods(self, namespace, selector=None, deployment_name=None):
        """ Find pods in a namespace, for a deployment or selector.

        Returns Pod wrappers and remembers them in ``self.last_pods``.
        """
        namespace = namespace or "default"
        pods = self.k8s_client.list_pods(
            namespace=namespace,
            selector=selector,
            deployment_name=deployment_name,
        )
        pod_objects = [
            Pod(
                num=i,
                name=item.metadata.name,
                namespace=item.metadata.namespace,
                uid=item.metadata.uid,
                host_ip=item.status.host_ip,
                ip=item.status.pod_ip,
                container_ids=[
                    status.container_id
                    for status in item.status.container_statuses
                ] if item.status.container_statuses else [],
                state=item.status.phase,
                labels=item.metadata.labels,
                meta=item,
            ) for i, item in enumerate(pods)
        ] if pods else []
        self.last_pods = pod_objects
        return pod_objects
|
#!/usr/bin/env python3
from Crypto.Cipher import AES

key = b"\x71\x96\xab\xa1\x5d\x50\x37\x04\xfe\x2e\xf8\x14\xad\xbc\x4a\xb3"
data = [
    b"\xbe\x43\x1a\x3a\x1a\xc7\x93\xee\x5a\x7f\x77\x3c\x6e\x51\x0c\x20",
    b"\xec\x7b\x87\x2c\xcd\x83\x3d\xaa\x96\xb2\x63\xbc\x21\x62\x94\x42",
]

# CBC-decrypt each 16-byte block, chaining the previous ciphertext block
# as the IV (an all-zero IV for the first block).
previous_block = b"\x00" * 16
decrypted_data = b""
for block in data:
    cipher = AES.new(key, AES.MODE_CBC, previous_block)
    decrypted_data += cipher.decrypt(block)
    previous_block = block
print(decrypted_data)
|
# NOTE: training using SumTreeReplayBuffer fails to converge
# Source: https://raw.githubusercontent.com/rlcode/per/master/SumTree.py
import numpy
# SumTree
# a binary tree data structure where the parent’s value is the sum of its children
import torch
from src.v2_dqn.ReplayBuffer import Experience, device
class SumTree:
    """Binary sum tree: each internal node stores the sum of its children,
    leaves store priorities, and `data` holds the associated samples in a
    ring buffer."""
    write = 0

    def __init__(self, capacity):
        self.capacity = capacity
        self.tree = numpy.zeros(2 * capacity - 1)
        self.data = numpy.zeros(capacity, dtype=object)
        self.n_entries = 0

    def _propagate(self, idx, change):
        # Add `change` to every ancestor up to (and including) the root.
        # Kept iterative: the recursive form hit Python's recursion limit
        # on very large trees (original BUGFIX by James McGuigan).
        while True:
            parent = (idx - 1) // 2
            self.tree[parent] += change
            if parent <= 0:
                break
            idx = parent

    def _retrieve(self, idx, s):
        # Descend until a leaf: go left if `s` fits in the left subtree,
        # otherwise go right with the left subtree's mass subtracted.
        left = 2 * idx + 1
        if left >= len(self.tree):
            return idx
        if s <= self.tree[left]:
            return self._retrieve(left, s)
        return self._retrieve(left + 1, s - self.tree[left])

    def total(self):
        """Sum of all stored priorities (the root node)."""
        return self.tree[0]

    def add(self, p, data):
        """Store `data` with priority `p`, overwriting the oldest slot once
        the ring buffer is full."""
        leaf = self.write + self.capacity - 1
        self.data[self.write] = data
        self.update(leaf, p)
        self.write = (self.write + 1) % self.capacity
        if self.n_entries < self.capacity:
            self.n_entries += 1

    def update(self, idx, p):
        """Set leaf `idx` to priority `p` and propagate the delta upward."""
        delta = p - self.tree[idx]
        self.tree[idx] = p
        self._propagate(idx, delta)

    def get(self, s):
        """Return (tree index, priority, data) for cumulative priority `s`."""
        idx = self._retrieve(0, s)
        return (idx, self.tree[idx], self.data[idx - self.capacity + 1])
# Source: https://github.com/rlcode/per/blob/master/prioritized_memory.py
import random
import numpy as np
# from SumTree import SumTree
class SumTreeMemory:  # stored as ( s, a, r, s_ ) in SumTree
    """Prioritized experience memory backed by a SumTree."""
    e = 0.01                             # epsilon keeps zero-error samples selectable
    a = 0.6                              # prioritization strength exponent
    beta = 0.4                           # importance-sampling correction, annealed to 1
    beta_increment_per_sampling = 0.001

    def __init__(self, capacity):
        self.tree = SumTree(capacity)
        self.capacity = capacity

    def _get_priority(self, error):
        return (np.abs(error) + self.e) ** self.a

    def add(self, error, sample):
        self.tree.add(self._get_priority(error), sample)

    def sample(self, n):
        """Draw `n` samples, one from each equal segment of the priority
        mass, returning (batch, tree indices, importance-sampling weights)."""
        batch, idxs, priorities = [], [], []
        segment = self.tree.total() / n
        self.beta = np.min([1., self.beta + self.beta_increment_per_sampling])

        for i in range(n):
            s = random.uniform(segment * i, segment * (i + 1))
            (idx, p, data) = self.tree.get(s)
            priorities.append(p)
            batch.append(data)
            idxs.append(idx)

        sampling_probabilities = np.asarray(priorities) / self.tree.total()
        is_weight = np.power(self.tree.n_entries * sampling_probabilities, -self.beta)
        is_weight /= is_weight.max()
        return batch, idxs, is_weight

    def update(self, idx, error):
        self.tree.update(idx, self._get_priority(error))
### Shared Interface with ReplayBuffer
class SumTreeReplayBuffer(SumTreeMemory):  # stored as ( s, a, r, s_ ) in SumTree
    """
    Wrapper class around SumTreeMemory to provide a common interface with ReplayBuffer
    @Author James McGuigan
    """
    def __init__(self, action_size, buffer_size, batch_size, seed=0):
        self.action_size = action_size  # unused
        self.batch_size = batch_size
        self.buffer_size = buffer_size
        self.seed = seed  # unused
        super(SumTreeReplayBuffer, self).__init__(buffer_size)

    def __len__(self):
        return self.tree.n_entries

    def sample(self):
        """Draw a prioritized batch and return it as torch tensors, or None
        when no initialized samples were drawn."""
        # SumTreeMemory may return 0 for samples that have not been initialized, so filter them out
        experiences = []
        idxs = []
        _experiences, _idxs, is_weight = super(SumTreeReplayBuffer, self).sample(self.batch_size)
        for experience, idx in zip(_experiences, _idxs):
            if isinstance(experience, tuple):
                experiences.append(experience)
                idxs.append(idx)
                if len(idxs) == self.batch_size: break
        if len(experiences) == 0:
            return None

        states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(device)
        actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).long().to(device)
        rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(device)
        next_states = torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(device)
        dones = torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device)
        # BUG FIX: tree indices routinely exceed 255 for any buffer larger
        # than ~128 entries; the original cast them to np.uint8, silently
        # truncating them so update() later adjusted the wrong leaves.
        idxs = torch.from_numpy(np.vstack([idx for idx in idxs if idx is not None]).astype(np.int64)).float().to(device)
        return (states, actions, rewards, next_states, dones, idxs)

    def add(self, state, action, reward, next_state, done, idx, error=0):
        # QUESTION: should error be initialized to 0?
        sample = Experience(state, action, reward, next_state, done, idx)
        super(SumTreeReplayBuffer, self).add(error, sample)

    def update(self, experiences, td_errors):
        """Re-prioritize the sampled leaves using their new TD errors."""
        states, actions, rewards, next_states, dones, idxs = experiences
        assert len(idxs) == len(td_errors)
        for i in range(len(idxs)):
            idx = int(idxs[i])
            td_error = float(td_errors[i])
            super(SumTreeReplayBuffer, self).update(idx, td_error)
|
'use strict'
const {
getNamedType,
print,
parse,
Kind
} = require('graphql')
// Symbol key under which per-service entity resolvers are stored on `reply`.
const kEntityResolvers = Symbol('mercurius.entity-resolvers')
// Resolve the named (unwrapped) type of `type.fieldName` in the schema.
function getFieldType (schema, type, fieldName) {
  const field = schema.getType(type).getFields()[fieldName]
  return getNamedType(field.type)
}
// Return the selections encoded in a directive's first argument, e.g. the
// field list of @requires(fields: "a b") or @key(fields: "id"). Returns an
// empty array when the node has no AST or no such directive.
function getDirectiveSelection (node, directiveName) {
  if (!node || !node.astNode) {
    return []
  }

  const match = node.astNode.directives.find(d => d.name.value === directiveName)
  if (!match) {
    return []
  }

  const query = parse(`{ ${match.arguments[0].value.value} }`)
  return query.definitions[0].selectionSet.selections
}
// Filter a selection set down to what the given federated service owns:
// keeps fragments and fields present in service.typeMap[type], recurses
// into sub-selections (pulling in fields demanded by @requires), and always
// appends __typename plus the type's @key fields so the entity can be
// identified by the gateway.
function removeNonServiceTypeFields (selections, service, type, schema) {
  return [
    ...selections.filter(selection => selection.kind === Kind.INLINE_FRAGMENT || selection.kind === Kind.FRAGMENT_SPREAD || service.typeMap[type].has(selection.name.value)).map(selection => {
      if (selection.selectionSet && selection.selectionSet.selections && !(selection.kind === Kind.INLINE_FRAGMENT)) {
        const fieldType = getFieldType(schema, type, selection.name.value)
        const requiredFields = []
        // Collect fields demanded by @requires directives on the child type.
        if (fieldType.getFields) {
          for (const field of Object.values(fieldType.getFields())) {
            requiredFields.push(...getDirectiveSelection(field, 'requires'))
          }
        }
        return {
          ...selection,
          selectionSet: {
            kind: Kind.SELECTION_SET,
            selections: removeNonServiceTypeFields([...selection.selectionSet.selections, ...requiredFields], service, fieldType, schema)
          }
        }
      }
      return selection
    }),
    // __typename is always requested to disambiguate entity results.
    {
      kind: Kind.FIELD,
      name: {
        kind: Kind.NAME,
        value: '__typename'
      },
      arguments: [],
      directives: []
    },
    ...getDirectiveSelection(type, 'key')
  ]
}
// Build a DocumentNode for a plain query/mutation against a single service:
//   <operation> Query_<fieldName>(<variableDefinitions>) {
//     <fieldName>(<args>) { <selections> }
//   }
function createQueryOperation ({ fieldName, selections, variableDefinitions, args, fragments, operation }) {
  return {
    kind: Kind.DOCUMENT,
    definitions: [{
      kind: Kind.OPERATION_DEFINITION,
      operation,
      name: {
        kind: Kind.NAME,
        value: `Query_${fieldName}`
      },
      variableDefinitions,
      directives: [],
      selectionSet: {
        kind: Kind.SELECTION_SET,
        selections: [{
          kind: Kind.FIELD,
          name: {
            kind: Kind.NAME,
            value: fieldName
          },
          arguments: args,
          directives: [],
          selectionSet: {
            kind: Kind.SELECTION_SET,
            selections
          }
        }]
      }
    }]
  }
}
// Build the federation entities query used to resolve entity references:
//   query EntitiesQuery($representations: [_Any!]!, ...vars) {
//     _entities(representations: $representations) {
//       __typename
//       ... on <returnType> { <selections> }
//     }
//   }
function createEntityReferenceResolverOperation ({ returnType, selections, variableDefinitions }) {
  return {
    kind: Kind.DOCUMENT,
    definitions: [{
      kind: Kind.OPERATION_DEFINITION,
      operation: 'query',
      name: {
        kind: Kind.NAME,
        value: 'EntitiesQuery'
      },
      // The caller's variable definitions plus the mandatory
      // $representations: [_Any!]! variable required by _entities.
      variableDefinitions: [
        ...variableDefinitions,
        {
          kind: Kind.VARIABLE_DEFINITION,
          variable: {
            kind: Kind.VARIABLE,
            name: {
              kind: Kind.NAME,
              value: 'representations'
            }
          },
          type: {
            kind: Kind.NON_NULL_TYPE,
            type: {
              kind: Kind.LIST_TYPE,
              type: {
                kind: Kind.NON_NULL_TYPE,
                type: {
                  kind: Kind.NAMED_TYPE,
                  name: {
                    kind: Kind.NAME,
                    value: '_Any'
                  }
                }
              }
            }
          },
          directives: []
        }
      ],
      directives: [],
      selectionSet: {
        kind: Kind.SELECTION_SET,
        selections: [{
          kind: Kind.FIELD,
          name: {
            kind: Kind.NAME,
            value: '_entities'
          },
          arguments: [
            {
              kind: Kind.ARGUMENT,
              name: {
                kind: Kind.NAME,
                value: 'representations'
              },
              value: {
                kind: Kind.VARIABLE,
                name: {
                  kind: Kind.NAME,
                  value: 'representations'
                }
              }
            }
          ],
          directives: [],
          selectionSet: {
            kind: Kind.SELECTION_SET,
            selections: [
              {
                kind: Kind.FIELD,
                name: {
                  kind: Kind.NAME,
                  value: '__typename'
                },
                arguments: [],
                directives: []
              },
              {
                kind: Kind.INLINE_FRAGMENT,
                typeCondition: {
                  kind: Kind.NAMED_TYPE,
                  name: {
                    kind: Kind.NAME,
                    value: returnType
                  }
                },
                directives: [],
                selectionSet: {
                  kind: Kind.SELECTION_SET,
                  selections
                }
              }
            ]
          }
        }]
      }
    }]
  }
}
// Resolve an extension field: wrap the requested selections under the
// extended field, then reuse the entity-reference operation against the
// parent type.
function createFieldResolverOperation ({ parentType, fieldName, selections, variableDefinitions }) {
  const fieldSelection = {
    kind: Kind.FIELD,
    name: {
      kind: Kind.NAME,
      value: fieldName
    },
    directives: [],
    selectionSet: {
      kind: Kind.SELECTION_SET,
      selections
    }
  }
  return createEntityReferenceResolverOperation({
    returnType: parentType,
    variableDefinitions,
    selections: [fieldSelection]
  })
}
// Push into `acc` every variable name referenced directly by `fields` or
// nested inside object-valued fields.
function collectVariableNames (acc, fields) {
  for (const field of fields) {
    const kind = field.value.kind
    if (kind === Kind.VARIABLE) {
      acc.push(field.value.name.value)
    } else if (kind === Kind.OBJECT) {
      collectVariableNames(acc, field.value.fields)
    }
  }
}
// Return the variable names used by a field node's arguments, including
// variables nested inside object-valued arguments.
function collectArgumentNames (fieldNode) {
  const argumentNames = []
  if (!fieldNode.arguments) {
    return argumentNames
  }
  for (const argument of fieldNode.arguments) {
    /* istanbul ignore else if there is no arguments property we return empty array */
    if (argument.value.kind === Kind.VARIABLE) {
      argumentNames.push(argument.value.name.value)
    } else if (argument.value.kind === Kind.OBJECT) {
      collectVariableNames(argumentNames, argument.value.fields)
    } else if (argument.value.kind === Kind.LIST) {
      // TODO: Support GraphQL List
    }
  }
  return argumentNames
}
// Walk a selection tree and gather every argument name backed by a variable.
function collectArgumentsWithVariableValues (selections) {
  let names = []
  for (const selection of selections) {
    names = names.concat(collectArgumentNames(selection))
    const subSelections = selection.selectionSet && selection.selectionSet.selections
    if (subSelections) {
      names = names.concat(collectArgumentsWithVariableValues(subSelections))
    }
  }
  return names
}
// Collect every fragment-spread name used anywhere in a selection tree.
function getFragmentNamesInSelection (selections) {
  let names = []
  for (const selection of selections) {
    if (selection.kind === Kind.FRAGMENT_SPREAD) {
      names.push(selection.name.value)
    }
    if (selection.selectionSet) {
      names = names.concat(getFragmentNamesInSelection(selection.selectionSet.selections))
    }
  }
  return names
}
// Resolve the fragment definitions (filtered to the service's fields) that
// must be appended to an outgoing query, following fragments referenced by
// other fragments while avoiding revisiting ones already seen.
function collectFragmentsToInclude (usedFragments, fragments, service, schema) {
  const visitedFragments = new Set()
  const result = []
  for (const fragmentName of usedFragments) {
    visitedFragments.add(fragmentName)
    const fragment = fragments[fragmentName]
    const selections = removeNonServiceTypeFields(fragment.selectionSet.selections, service, fragment.typeCondition.name.value, schema)
    result.push({
      ...fragment,
      selectionSet: {
        kind: Kind.SELECTION_SET,
        selections
      }
    })
    // Recurse into fragments referenced by this fragment's own selections.
    const nestedNames = getFragmentNamesInSelection(selections).filter(name => !visitedFragments.has(name))
    result.push(...collectFragmentsToInclude(nestedNames, fragments, service, schema))
  }
  return result
}
function generatePathKey (path) {
const keys = []
if (path.prev) {
keys.push(...generatePathKey(path.prev))
}
keys.push(path.key)
return keys
}
/**
* Creates a resolver function for a fields type
*
* There are 3 options:
* - Query field resolver: when the service of the type is null
* - Reference entity resolver: when the service of type defined the field on the type
* - Field entity resolver: when the field was added through type extension in the service of the field's type
*
*/
function makeResolver ({ service, createOperation, transformData, isQuery, isReference, isSubscription }) {
  return function (parent, args, context, info) {
    const {
      fieldNodes,
      returnType,
      fieldName,
      parentType,
      operation: originalOperation,
      variableValues,
      fragments,
      schema
    } = info

    // A reference resolver with no reference on the parent has nothing to do.
    if (isReference && !parent[fieldName]) return null

    // Cache key: the resolver's path with list indices normalized, so all
    // items of a list share one cached query.
    const resolverKey = generatePathKey(info.path).join('.').replace(/\d/g, '_IDX_')
    const { reply, __currentQuery, lruGatewayResolvers, pubsub } = context

    const cached = lruGatewayResolvers.get(`${__currentQuery}_${resolverKey}`)
    let variableNamesToDefine
    let query

    // Get the actual type as the returnType can be NonNull or List as well
    const type = getNamedType(returnType)

    if (cached) {
      variableNamesToDefine = cached.variableNamesToDefine
      query = cached.query
    } else {
      // Remove items from selections that are not defined in the service
      const selections = fieldNodes[0].selectionSet ? removeNonServiceTypeFields(fieldNodes[0].selectionSet.selections, service, type, schema) : []

      // collect all variable names that are used in selection
      variableNamesToDefine = new Set(collectArgumentsWithVariableValues(selections))
      collectArgumentNames(fieldNodes[0]).map(argumentName => variableNamesToDefine.add(argumentName))
      const variablesToDefine = originalOperation.variableDefinitions.filter(definition => variableNamesToDefine.has(definition.variable.name.value))

      // create the operation that will be sent to the service
      const operation = createOperation({
        returnType: type,
        parentType,
        fieldName,
        selections,
        isQuery,
        isReference,
        variableDefinitions: variablesToDefine,
        args: fieldNodes[0].arguments,
        operation: originalOperation.operation
      })

      query = print(operation)

      // check if fragments are used in the original query
      const usedFragments = getFragmentNamesInSelection(selections)
      const fragmentsToDefine = collectFragmentsToInclude(usedFragments, fragments, service, schema)

      /* istanbul ignore else */
      if (fragmentsToDefine.length > 0) {
        // Append each needed fragment definition exactly once.
        const fragmentsIncluded = new Set()
        for (const fragment of fragmentsToDefine) {
          if (!fragmentsIncluded.has(fragment.name.value)) {
            query += `\n${print(fragment)}`
            fragmentsIncluded.add(fragment.name.value)
          }
        }
      }

      lruGatewayResolvers.set(`${__currentQuery}_${resolverKey}`, { query, variableNamesToDefine })
    }

    const variables = {}

    // Add variables to payload
    for (const [variableName, variableValue] of Object.entries(variableValues)) {
      if (variableNamesToDefine.has(variableName)) {
        variables[variableName] = variableValue
      }
    }

    // Entity resolution needs `representations`: the @key (and @requires)
    // fields identifying the entity instances on the owning service.
    if (isReference) {
      if (parent[fieldName] instanceof Array) {
        variables.representations = parent[fieldName].map(ref => removeNonIdProperties(ref, type))
      } else {
        variables.representations = [removeNonIdProperties(parent[fieldName], type)]
      }
    } else if (!isQuery && !isSubscription) {
      variables.representations = [{
        ...removeNonIdProperties(parent, parentType),
        ...getRequiredFields(parent, schema.getType(parentType).getFields()[fieldName])
      }]
    }

    if (isSubscription) {
      const subscriptionId = service.createSubscription(query, variables, pubsub.publish.bind(pubsub), context._connectionInit)
      return pubsub.subscribe(`${service.name}_${subscriptionId}`)
    }

    if (isQuery) {
      return service.sendRequest({
        method: 'POST',
        body: JSON.stringify({
          query,
          variables
        }),
        originalRequestHeaders: reply.request.headers
      }).then(transformData)
    }

    // Entity resolutions go through the per-service entity resolver, which
    // batches _entities requests.
    return reply[kEntityResolvers][`${service.name}Entity`]({
      query,
      variables,
      originalRequestHeaders: reply.request.headers
    }).then(transformData)
  }
}
function removeNonIdProperties (obj, type) {
const keyDirective = type.astNode.directives.find(d => d.name.value === 'key')
const idFields = keyDirective.arguments[0].value.value.split(' ')
const result = {
__typename: obj.__typename
}
for (const id of idFields) {
result[id] = obj[id]
}
return result
}
function getRequiredFields (obj, field) {
const requiresDirective = field.astNode.directives.find(d => d.name.value === 'requires')
const result = {}
if (!requiresDirective) {
return result
}
const requiredFields = requiresDirective.arguments[0].value.value.split(' ')
for (const requiredField of requiredFields) {
result[requiredField] = obj[requiredField]
}
return result
}
// Public API of this module, plus the symbol used to look up batched
// entity resolvers on the reply object.
module.exports = {
  makeResolver,
  createQueryOperation,
  createFieldResolverOperation,
  createEntityReferenceResolverOperation,
  kEntityResolvers
}
|
from __future__ import unicode_literals
from django.db import models
from pygments.lexers import get_lexer_by_name
from pygments.formatters.html import HtmlFormatter
from pygments import highlight
class Sensors(models.Model):
    """A hardware sensor attached to a controller, owned by a user."""

    # Two-character type codes stored in the `sensor_types` column.
    switch = 'SW'
    output = 'OU'
    slider = 'SL'
    sensor_types_choices = (
        (switch, 'On off switch'),
        (output, 'Sensor output'),
        (slider, 'Data slider'),
    )

    # Kind of sensor; defaults to an on/off switch.
    sensor_types = models.CharField(
        max_length=2,
        choices=sensor_types_choices,
        default=switch,
    )
    # NOTE(review): related_name='category' / 'controller' name the *reverse*
    # accessors on Categories/Controllers; plural names would be more
    # conventional, but renaming is a migration-visible change — confirm first.
    category = models.ForeignKey('categories.Categories', related_name='category', on_delete=models.CASCADE, null=True)
    controller = models.ForeignKey('controllers.Controllers', related_name='controller', on_delete=models.CASCADE, null=True)
    title = models.CharField(max_length=200)
    # Current sensor reading — presumably; meaning depends on sensor type. TODO confirm.
    value = models.IntegerField(null=True)
    # GPIO pin number on the controller board — TODO confirm.
    gpio = models.IntegerField(null=True)
    description = models.TextField()
    created = models.DateField(auto_now_add=True)
    user = models.ForeignKey('auth.User', on_delete=models.CASCADE, null=True)

    def __str__(self):
        return self.title

    class Meta:
        verbose_name = 'Sensor'
        verbose_name_plural = 'Sensors'
        # Newest sensors first.
        ordering = ['-created']
|
import torch
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import pickle
import torch.utils.data as torchdata
import matplotlib.patches as mpatches
import colorcet
from pathlib import Path
from torch import nn
from torch.nn import functional as F
from alr.utils import savefig
from alr.data.datasets import Dataset
import os
# Work relative to the report directory.
# NOTE(review): hard-coded absolute path — machine-specific; breaks elsewhere.
os.chdir("/Users/harry/Documents/workspace/thesis/reports/09_imbalanced_classes")
# Experiment configuration flags.
DATA = "CIFAR"
include_noise = False
SAVE = False
# CIFAR-10 class ids treated as the under-sampled "minority" classes.
minority_classes = {0, 4, 9}
_, test = Dataset.CIFAR10.get()
# Partition CIFAR-10 test-set indices into minority/majority pools by label.
minority_idxs = []
majority_idxs = []
for idx, (_, y) in enumerate(test):
    if y in minority_classes:
        minority_idxs.append(idx)
    else:
        majority_idxs.append(idx)
def sort_files(files):
    """Order result files by the integer index before their 4-char extension.

    E.g. ``rep_1_10.pkl`` sorts after ``rep_1_2.pkl`` (numeric, not lexicographic).
    """
    def trailing_index(path):
        # "rep_1_23.pkl" -> "23.pkl" -> 23 (last 4 chars are the extension).
        tail = str(path).rsplit("_", 1)[-1]
        return int(tail[:-4])

    return sorted(files, key=trailing_index)
def score_sample_plot(
    ax, scores, size, ylab, xlab, top=None, cifar_idx=10_000, no_svhn=False
):
    """Scatter-plot the ``top`` highest acquisition scores, coloured by pool group.

    Points are ranked by descending score and coloured as CIFAR majority,
    CIFAR minority (module-level ``minority_idxs``/``majority_idxs``) or SVHN
    (pool indices >= ``cifar_idx``).  The title reports each group's share.

    Fix: the title previously rendered "…%Minority" with no space between the
    percentage and the word.

    :param ax: matplotlib axes to draw on.
    :param scores: 1-D score array; CIFAR entries first, SVHN entries from
        ``cifar_idx`` onward (assumed layout — TODO confirm against callers).
    :param size: training-set size shown in the title.
    :param ylab: y-axis label.
    :param xlab: x-axis label.
    :param top: number of highest-scoring points to plot (default: all).
    :param no_svhn: if True, only the first 10_000 (CIFAR) scores are used.
    """
    if no_svhn:
        scores = scores[:10_000]
    if not top:
        # take everything
        top = len(scores)
    # Indices of the `top` largest scores, highest first.
    idxs = np.argsort(scores)[::-1][:top]
    cifar_minority_mask = np.isin(idxs, minority_idxs)
    cifar_majority_mask = np.isin(idxs, majority_idxs)
    if not no_svhn:
        svhn_mask = idxs >= cifar_idx
        svhn_counts = svhn_mask.sum()
    else:
        svhn_counts = 0
    minority_counts = cifar_minority_mask.sum()
    majority_counts = cifar_majority_mask.sum()
    # Every plotted point belongs to exactly one of the three groups.
    assert svhn_counts + minority_counts + majority_counts == top
    ax.scatter(
        np.nonzero(cifar_majority_mask)[0],
        scores[idxs[cifar_majority_mask]],
        alpha=1,
        label="majority",
        s=2,
    )
    if not no_svhn:
        ax.scatter(
            np.nonzero(svhn_mask)[0],
            scores[idxs[svhn_mask]],
            color="red",
            label="SVHN",
            s=2,
        )
    ax.scatter(
        np.nonzero(cifar_minority_mask)[0],
        scores[idxs[cifar_minority_mask]],
        color="orange",
        label="minority",
        s=2,
    )
    ax.set_xlabel(xlab)
    ax.set_ylabel(ylab)
    ax.set_title(
        f"Size {size} {svhn_counts/top:.2%} SVHN;\n{minority_counts/top:.2%} Minority; {majority_counts/top:.2%} Majority",
        fontsize=8,
    )
def itc(iteration, batch_size=400, initial=800):
    """Convert a 1-based acquisition iteration into a training-set size."""
    acquired_batches = iteration - 1
    return initial + acquired_batches * batch_size
def bald_range(ax, scores, size, xlabel, ylabel):
    """Box-plot BALD score distributions for the SVHN / minority / majority pools.

    Assumes the first 10000 entries of ``scores`` are CIFAR and the rest SVHN;
    minority/majority membership comes from the module-level index lists.
    """
    cifar_part, svhn_part = scores[:10000], scores[10000:]
    groups = [svhn_part, cifar_part[minority_idxs], cifar_part[majority_idxs]]
    ax.boxplot(groups)
    if xlabel:
        ax.set_xticklabels(["SVHN", "Minority", "Majority"])
    if ylabel:
        ax.set_ylabel("BALD")
    ax.set_title(f"Size {size}")
def entropy_range(ax, entropy, etype, size, xlabel, ylabel):
    """Box-plot an entropy measure for the SVHN / minority / majority pools.

    ``etype`` names the measure (used as the y-axis label). Same 10000-entry
    CIFAR/SVHN split assumption as ``bald_range``.
    """
    cifar_part, svhn_part = entropy[:10000], entropy[10000:]
    groups = [svhn_part, cifar_part[minority_idxs], cifar_part[majority_idxs]]
    ax.boxplot(groups)
    if xlabel:
        ax.set_xticklabels(["SVHN", "Minority", "Majority"])
    if ylabel:
        ax.set_ylabel(etype)
    ax.set_title(f"Size {size}")
def proportion_plot(ax, scores, etype, size, xlabel, ylabel, gt=True):
    """Plot, per score threshold, the fraction of each pool at/above it.

    With ``gt=False`` the direction flips to "at/below". Sweeps 1000 evenly
    spaced thresholds between the min and max score.
    """
    cifar = scores[:10_000]
    svhn = scores[10_000:]
    thresholds = np.linspace(min(scores), max(scores), num=1000)
    minority_part = cifar[minority_idxs]
    majority_part = cifar[majority_idxs]
    if gt:
        miy = [(minority_part >= t).mean() for t in thresholds]
        may = [(majority_part >= t).mean() for t in thresholds]
        sy = [(svhn >= t).mean() for t in thresholds]
    else:
        miy = [(minority_part <= t).mean() for t in thresholds]
        may = [(majority_part <= t).mean() for t in thresholds]
        sy = [(svhn <= t).mean() for t in thresholds]
    ax.plot(thresholds, may, label="Majority")
    ax.plot(thresholds, miy, label="Minority", color="orange")
    ax.plot(thresholds, sy, label="SVHN", color="red")
    if xlabel:
        ax.set_xlabel(etype)
    if ylabel:
        ax.set_ylabel("Proportion")
    ax.set_title(f"Size {size}")
# --- Load per-acquisition score dumps for one repetition ---------------------
dataset = "cifar"
root = Path(f"./data_files/cnn13_ens")
files = list(root.glob("*_accs.pkl"))
models = [m.name.split("_")[0] for m in files]
# Indices of the SVHN subset mixed into the acquisition pool.
with open(root / "subset_idxs.pkl", "rb") as fp:
    idxs = pickle.load(fp)
model = models[0]
files = sort_files(root.glob("rep_1*.pkl"))
# One list per metric; the *_2 / a* variants are unused placeholders here.
scores_1 = []
scores_2 = []
ascores_1 = []
ascores_2 = []
predictive_entropy_1 = []
predictive_entropy_2 = []
apredictive_entropy_1 = []
apredictive_entropy_2 = []
average_entropy_1 = []
average_entropy_2 = []
aaverage_entropy_1 = []
aaverage_entropy_2 = []
confidence_1 = []
confidence_2 = []
class_1 = []
class_2 = []
# Map each pickled metric key to the container(s) that accumulate it.
map_containers = {
    "bald_score": (scores_1,),
    "bald_score2": (ascores_1,),
    "predictive_entropy": (predictive_entropy_1,),
    "predictive_entropy2": (apredictive_entropy_1,),
    "average_entropy": (average_entropy_1,),
    "average_entropy2": (aaverage_entropy_1,),
    "confidence": (confidence_1,),
    "class": (class_1,),
}
# Each file holds one acquisition iteration's metrics over the whole pool:
# 10_000 CIFAR entries first, then SVHN, then (optionally) noise from 36_032.
for f in files:
    with open(f, "rb") as fp:
        trials = pickle.load(fp)
    # 'average_entropy', 'predictive_entropy', 'average_entropy2', 'predictive_entropy2',
    # 'bald_score', 'bald_score2', 'confidence', 'class'
    for k, containers in map_containers.items():
        # assert len(trials) == len(containers)
        for trial, container in zip([trials], containers):
            cifar = trial[k][:10_000]
            svhn = trial[k][10_000:][idxs]
            noise = trial[k][36_032:]
            if include_noise:
                combined = np.r_[cifar, svhn, noise]
                assert combined.shape == (20_020,)
            else:
                combined = np.r_[cifar, svhn]
                assert combined.shape == (20_000,)
            container.append(combined)
# Stack per-iteration vectors into (n_iterations, pool_size) arrays.
bald_scores = np.stack(scores_1)
proper_bald_score = np.stack(ascores_1)
predictive_entropy = np.stack(predictive_entropy_1)
proper_predictive_entropy = np.stack(apredictive_entropy_1)
average_entropy = np.stack(average_entropy_1)
proper_average_entropy = np.stack(aaverage_entropy_1)
confidence = np.stack(confidence_1)
classes = np.stack(class_1)
# Pick N evenly spaced iterations to show in a 2x3 grid of subplots.
N = 6
interval = np.linspace(0, bald_scores.shape[0] - 1, num=N).astype(int)
fig_params = dict(nrows=2, ncols=3, figsize=(3 * 3, 2 * 3), sharex=True, sharey=True)
## Visual analysis
scores = bald_scores
pent = predictive_entropy
aent = average_entropy
conf = confidence
TOP = 50
# Box plots of BALD score distributions per pool group, one panel per size.
fig, axes = plt.subplots(**fig_params)
axes = axes.flatten()
for i, ax in enumerate(axes):
    bald_range(
        axes[i],
        scores[interval[i]],
        size=itc(interval[i] + 1),
        xlabel=(i >= fig_params["ncols"]),
        ylabel=(i % fig_params["ncols"] == 0),
    )
fig.suptitle(f"BALD scores")
savefig("/Users/harry/Documents/workspace/thesis/figures/4/rough_bald_dist.pdf")
# Cumulative-proportion curves of predictive entropy, one panel per size.
fig, axes = plt.subplots(**fig_params)
axes = axes.flatten()
for i, ax in enumerate(axes):
    proportion_plot(
        axes[i],
        pent[interval[i]],
        etype="Predictive Entropy",
        size=itc(interval[i] + 1),
        xlabel=(i >= fig_params["ncols"]),
        ylabel=(i % fig_params["ncols"] == 0),
        gt=False,
    )
    axes[i].legend()
fig.suptitle(f"Predictive Entropy")
savefig("/Users/harry/Documents/workspace/thesis/figures/4/rough_pred_ent.pdf")
|
/**
* Copyright (c) 2014-present, Facebook, Inc. All rights reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
'use strict';
const path = require('path');
const {sync: spawnSync} = require('cross-spawn');
const skipOnWindows = require('../../../../scripts/skip_on_windows');
// These tests spawn the CLI as a child process; skip the suite on Windows.
skipOnWindows.suite();
// Path to the jest-runtime CLI entry point under test.
const JEST_RUNTIME = path.resolve(__dirname, '../../bin/jest-runtime.js');
// Run the CLI synchronously with the given argv; returns the spawnSync
// result (stdout/stderr decoded as utf8).
const run = args =>
  spawnSync(JEST_RUNTIME, args, {
    cwd: process.cwd(),
    encoding: 'utf8',
    env: process.env,
  });
// End-to-end tests that invoke the jest-runtime CLI and inspect its output.
describe('Runtime', () => {
  describe('cli', () => {
    it('fails with no path', () => {
      const expectedOutput =
        'Please provide a path to a script. (See --help for details)\n';
      expect(run([]).stdout).toBe(expectedOutput);
    });
    it('displays script output', () => {
      // logging.js prints a greeting; --no-cache avoids stale transform caches.
      const scriptPath = path.resolve(__dirname, './test_root/logging.js');
      expect(run([scriptPath, '--no-cache']).stdout).toMatch('Hello, world!\n');
    });
    it('always disables automocking', () => {
      // Even with automock:true in the inline config, the CLI must run unmocked.
      const scriptPath = path.resolve(__dirname, './test_root/logging.js');
      const output = run([
        scriptPath,
        '--no-cache',
        '--config=' +
          JSON.stringify({
            automock: true,
          }),
      ]);
      expect(output.stdout).toMatch('Hello, world!\n');
    });
    it('throws script errors', () => {
      // Errors thrown by the target script must surface on stderr.
      const scriptPath = path.resolve(__dirname, './test_root/throwing.js');
      expect(run([scriptPath, '--no-cache']).stderr).toMatch(
        'Error: throwing\n',
      );
    });
  });
});
|
App({
  onLaunch: function () {
    // Detect the device model at launch so pages can adapt their
    // layout for the iPhone X notch/safe area.
    wx.getSystemInfo({
      success: (res) => {
        this.globalData.systemInfo = res
        // Literal substring check; model strings look like "iPhone X ...".
        if (res.model.indexOf('iPhone X') !== -1) {
          this.globalData.isIphoneX = true
        }
      }
    })
  },
  // Shared application state available to every page.
  globalData: {
    systemInfo: null,
    userInfo: null,
    version: "1.0.0",
    isIphoneX: false
  }
})
|
'use strict';
// Smoke test only: requiring the package entry point must not throw.
describe('clear-images', function() {
  // TODO(ndhoule): Add tests
  it('should pass a basic smoke test', function() {
    require('../lib');
  });
});
|
/* Test of <netinet/in.h> substitute.
Copyright (C) 2007, 2009-2013 Free Software Foundation, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* Written by Bruno Haible <bruno@clisp.org>, 2007. */
#include <config.h>
#include <netinet/in.h>
int
main (void)
{
  /* Successfully compiling this file with <netinet/in.h> included is the
     whole test; there is nothing to verify at run time.  */
  return 0;
}
|
#!/usr/bin/env pytest
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test read/write functionality for NITF driver.
# Author: Frank Warmerdam <warmerdam@pobox.com>
#
###############################################################################
# Copyright (c) 2003, Frank Warmerdam <warmerdam@pobox.com>
# Copyright (c) 2008-2013, Even Rouault <even dot rouault at spatialys.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import copy
import os
import array
import struct
import shutil
from osgeo import gdal
from osgeo import osr
import gdaltest
import pytest
@pytest.fixture(scope='module')
def not_jpeg_9b():
    # Runs the sibling jpeg test module once so gdaltest.jpeg_version is
    # populated, then skips dependent tests when built against libjpeg 9b.
    import jpeg
    jpeg.test_jpeg_1()
    if gdaltest.jpeg_version == '9b':
        pytest.skip()
def hex_string(s):
    """Hex-encode each character of *s* (no zero padding, matching hex())."""
    return "".join(format(ord(ch), "x") for ch in s)
###############################################################################
# Write/Read test of simple byte reference data.
def test_nitf_1():
    # CreateCopy round-trip of a Byte raster; 4672 is byte.tif's checksum.
    tst = gdaltest.GDALTest('NITF', 'byte.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Write/Read test of simple 16bit reference data.
def test_nitf_2():
    # Same round-trip for Int16 data.
    tst = gdaltest.GDALTest('NITF', 'int16.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Write/Read RGB image with lat/long georeferencing, and verify.
def test_nitf_3():
    # 3-band RGB round-trip; 21349 is rgbsmall.tif band-3 checksum.
    tst = gdaltest.GDALTest('NITF', 'rgbsmall.tif', 3, 21349)
    return tst.testCreateCopy()
###############################################################################
# Test direction creation of an NITF file.
def nitf_create(creation_options, set_inverted_color_interp=True, createcopy=False):
    """Create tmp/test_create.ntf (200x100, 3 Byte bands) for later checks.

    :param creation_options: NITF creation options, e.g. ['ICORDS=G'].
    :param set_inverted_color_interp: write bands as B,G,R instead of R,G,B.
    :param createcopy: build in a MEM dataset and CreateCopy() it (needed for
        compressed output such as IC=C8).
    """
    drv = gdal.GetDriverByName('NITF')
    # Remove any leftover from a previous run; ignore "file not found".
    try:
        os.remove('tmp/test_create.ntf')
    except OSError:
        pass
    if createcopy:
        ds = gdal.GetDriverByName('MEM').Create('', 200, 100, 3, gdal.GDT_Byte)
    else:
        ds = drv.Create('tmp/test_create.ntf', 200, 100, 3, gdal.GDT_Byte,
                        creation_options)
    ds.SetGeoTransform((100, 0.1, 0.0, 30.0, 0.0, -0.1))
    if set_inverted_color_interp:
        ds.GetRasterBand(1).SetRasterColorInterpretation(gdal.GCI_BlueBand)
        ds.GetRasterBand(2).SetRasterColorInterpretation(gdal.GCI_GreenBand)
        ds.GetRasterBand(3).SetRasterColorInterpretation(gdal.GCI_RedBand)
    else:
        ds.GetRasterBand(1).SetRasterColorInterpretation(gdal.GCI_RedBand)
        ds.GetRasterBand(2).SetRasterColorInterpretation(gdal.GCI_GreenBand)
        ds.GetRasterBand(3).SetRasterColorInterpretation(gdal.GCI_BlueBand)
    # One scanline of Int16 samples per band (offset ramps 0.., 20.., 30..).
    my_list = list(range(200)) + list(range(20, 220)) + list(range(30, 230))
    try:
        raw_data = array.array('h', my_list).tobytes()
    except AttributeError:
        # Python 2: array has tostring() instead of tobytes().
        # (Was a bare `except:`, which also hid unrelated errors.)
        raw_data = array.array('h', my_list).tostring()
    for line in range(100):
        ds.WriteRaster(0, line, 200, 1, raw_data,
                       buf_type=gdal.GDT_Int16,
                       band_list=[1, 2, 3])
    if createcopy:
        ds = drv.CreateCopy('tmp/test_create.ntf', ds,
                            options=creation_options)
    ds = None
###############################################################################
# Test direction creation of an non-compressed NITF file.
def test_nitf_4():
    # Geographic coordinates (ICORDS=G), default IC=NC (no compression).
    return nitf_create(['ICORDS=G'])
# Verify created file
def nitf_check_created_file(checksum1, checksum2, checksum3, set_inverted_color_interp=True):
ds = gdal.Open('tmp/test_create.ntf')
chksum = ds.GetRasterBand(1).Checksum()
chksum_expect = checksum1
assert chksum == chksum_expect, 'Did not get expected chksum for band 1'
chksum = ds.GetRasterBand(2).Checksum()
chksum_expect = checksum2
assert chksum == chksum_expect, 'Did not get expected chksum for band 2'
chksum = ds.GetRasterBand(3).Checksum()
chksum_expect = checksum3
assert chksum == chksum_expect, 'Did not get expected chksum for band 3'
geotransform = ds.GetGeoTransform()
assert geotransform[0] == pytest.approx(100, abs=0.1) and geotransform[1] == pytest.approx(0.1, abs=0.001) and geotransform[2] == pytest.approx(0, abs=0.001) and geotransform[3] == pytest.approx(30.0, abs=0.1) and geotransform[4] == pytest.approx(0, abs=0.001) and geotransform[5] == pytest.approx(-0.1, abs=0.001), \
'geotransform differs from expected'
if set_inverted_color_interp:
assert ds.GetRasterBand(1).GetRasterColorInterpretation() == gdal.GCI_BlueBand, \
'Got wrong color interpretation.'
assert ds.GetRasterBand(2).GetRasterColorInterpretation() == gdal.GCI_GreenBand, \
'Got wrong color interpretation.'
assert ds.GetRasterBand(3).GetRasterColorInterpretation() == gdal.GCI_RedBand, \
'Got wrong color interpretation.'
ds = None
###############################################################################
# Verify file created by nitf_4()
def test_nitf_5():
    return nitf_check_created_file(32498, 42602, 38982)
###############################################################################
# Read existing NITF file. Verifies the new adjusted IGEOLO interp.
def test_nitf_6():
    # Checks both the projection (WGS84) and the adjusted geotransform.
    tst = gdaltest.GDALTest('NITF', 'nitf/rgb.ntf', 3, 21349)
    return tst.testOpen(check_prj='WGS84',
                        check_gt=(-44.842029478458, 0.003503401360, 0,
                                  -22.930748299319, 0, -0.003503401360))
###############################################################################
# NITF in-memory.
def test_nitf_7():
    # Same copy test as nitf_3 but through /vsimem/.
    tst = gdaltest.GDALTest('NITF', 'rgbsmall.tif', 3, 21349)
    return tst.testCreateCopy(vsimem=1)
###############################################################################
# Verify we can open an NSIF file, and get metadata including BLOCKA.
def test_nitf_8():
    ds = gdal.Open('data/nitf/fake_nsif.ntf')
    chksum = ds.GetRasterBand(1).Checksum()
    chksum_expect = 12033
    assert chksum == chksum_expect, 'Did not get expected chksum for band 1'
    md = ds.GetMetadata()
    assert md['NITF_FHDR'] == 'NSIF01.00', 'Got wrong FHDR value'
    # BLOCKA TRE fields exposed as individual metadata items.
    assert md['NITF_BLOCKA_BLOCK_INSTANCE_01'] == '01' and md['NITF_BLOCKA_BLOCK_COUNT'] == '01' and md['NITF_BLOCKA_N_GRAY_01'] == '00000' and md['NITF_BLOCKA_L_LINES_01'] == '01000' and md['NITF_BLOCKA_LAYOVER_ANGLE_01'] == '000' and md['NITF_BLOCKA_SHADOW_ANGLE_01'] == '000' and md['NITF_BLOCKA_FRLC_LOC_01'] == '+41.319331+020.078400' and md['NITF_BLOCKA_LRLC_LOC_01'] == '+41.317083+020.126072' and md['NITF_BLOCKA_LRFC_LOC_01'] == '+41.281634+020.122570' and md['NITF_BLOCKA_FRFC_LOC_01'] == '+41.283881+020.074924', \
        'BLOCKA metadata has unexpected value.'
###############################################################################
# Create and read a JPEG encoded NITF file.
def test_nitf_9():
    src_ds = gdal.Open('data/rgbsmall.tif')
    ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf9.ntf', src_ds,
                                                 options=['IC=C3'])
    src_ds = None
    ds = None
    # Reopen and compare band statistics (JPEG is lossy, hence approx).
    ds = gdal.Open('tmp/nitf9.ntf')
    (exp_mean, exp_stddev) = (65.9532, 46.9026375565)
    (mean, stddev) = ds.GetRasterBand(1).ComputeBandStats()
    assert exp_mean == pytest.approx(mean, abs=0.1) and exp_stddev == pytest.approx(stddev, abs=0.1), \
        'did not get expected mean or standard dev.'
    md = ds.GetMetadata('IMAGE_STRUCTURE')
    assert md['COMPRESSION'] == 'JPEG', 'Did not get expected compression value.'
###############################################################################
# For esoteric reasons, createcopy from jpeg compressed nitf files can be
# tricky. Verify this is working.
def test_nitf_10():
    src_ds = gdal.Open('tmp/nitf9.ntf')
    expected_cs = src_ds.GetRasterBand(2).Checksum()
    src_ds = None
    # Two accepted checksums: output differs slightly across libjpeg versions.
    assert expected_cs == 22296 or expected_cs == 22259
    tst = gdaltest.GDALTest('NITF', '../tmp/nitf9.ntf', 2, expected_cs)
    return tst.testCreateCopy()
###############################################################################
# Test 1bit file ... conveniently very small and easy to include! (#1854)
def test_nitf_11():
    # From http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/i_3034c.ntf
    tst = gdaltest.GDALTest('NITF', 'nitf/i_3034c.ntf', 1, 170)
    return tst.testOpen()
###############################################################################
# Verify that TRE and CGM access via the metadata domain works.
def test_nitf_12():
    """Read BLOCKA via the TRE domain and SEGMENT_COUNT via the CGM domain,
    through both GetMetadata() and GetMetadataItem()."""
    ds = gdal.Open('data/nitf/fake_nsif.ntf')
    mdTRE = ds.GetMetadata('TRE')
    try:  # NG bindings
        blockA = ds.GetMetadataItem('BLOCKA', 'TRE')
    except Exception:
        # Old-generation bindings lack GetMetadataItem(); fall back to the
        # dict. (Was a bare `except:`, which also swallowed SystemExit etc.)
        blockA = mdTRE['BLOCKA']
    mdCGM = ds.GetMetadata('CGM')
    try:  # NG bindings
        segmentCount = ds.GetMetadataItem('SEGMENT_COUNT', 'CGM')
    except Exception:
        segmentCount = mdCGM['SEGMENT_COUNT']
    ds = None
    expectedBlockA = '010000001000000000 +41.319331+020.078400+41.317083+020.126072+41.281634+020.122570+41.283881+020.074924 '
    assert mdTRE['BLOCKA'] == expectedBlockA, \
        'did not find expected BLOCKA from metadata.'
    assert blockA == expectedBlockA, 'did not find expected BLOCKA from metadata item.'
    assert mdCGM['SEGMENT_COUNT'] == '0', \
        'did not find expected SEGMENT_COUNT from metadata.'
    assert segmentCount == '0', \
        'did not find expected SEGMENT_COUNT from metadata item.'
###############################################################################
# Test creation of an NITF file in UTM Zone 11, Southern Hemisphere.
def test_nitf_13():
    """Create tmp/test_13.ntf with ICORDS=S (UTM south) georeferencing;
    verified by test_nitf_14()."""
    drv = gdal.GetDriverByName('NITF')
    ds = drv.Create('tmp/test_13.ntf', 200, 100, 1, gdal.GDT_Byte,
                    ['ICORDS=S'])
    ds.SetGeoTransform((400000, 10, 0.0, 6000000, 0.0, -10))
    ds.SetProjection('PROJCS["UTM Zone 11, Southern Hemisphere",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9108"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["Meter",1]]')
    my_list = list(range(200))
    try:
        raw_data = array.array('f', my_list).tobytes()
    except AttributeError:
        # Python 2: array has tostring() instead of tobytes().
        # (Was a bare `except:`, which also hid unrelated errors.)
        raw_data = array.array('f', my_list).tostring()
    # NOTE(review): the buffer is float32 ('f') but declared GDT_Int16 — looks
    # odd, but test_nitf_14's expected checksum depends on it; left unchanged.
    for line in range(100):
        ds.WriteRaster(0, line, 200, 1, raw_data,
                       buf_type=gdal.GDT_Int16,
                       band_list=[1])
    ds = None
###############################################################################
# Verify previous file
def test_nitf_14():
    # Opens the file written by test_nitf_13() and checks checksum,
    # geotransform and the UTM-south projection round-trip.
    ds = gdal.Open('tmp/test_13.ntf')
    chksum = ds.GetRasterBand(1).Checksum()
    chksum_expect = 55964
    assert chksum == chksum_expect, 'Did not get expected chksum for band 1'
    geotransform = ds.GetGeoTransform()
    assert geotransform[0] == pytest.approx(400000, abs=.1) and geotransform[1] == pytest.approx(10, abs=0.001) and geotransform[2] == pytest.approx(0, abs=0.001) and geotransform[3] == pytest.approx(6000000, abs=.1) and geotransform[4] == pytest.approx(0, abs=0.001) and geotransform[5] == pytest.approx(-10, abs=0.001), \
        'geotransform differs from expected'
    prj = ds.GetProjectionRef()
    assert prj.find('PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-117],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000]') != -1, \
        'Coordinate system not UTM Zone 11, Southern Hemisphere'
    ds = None
###############################################################################
# Test creating an in memory copy.
def test_nitf_15():
    tst = gdaltest.GDALTest('NITF', 'byte.tif', 1, 4672)
    return tst.testCreateCopy(vsimem=1)
###############################################################################
# Checks a 1-bit mono with mask table having (0x00) black as transparent with white arrow.
def test_nitf_16():
    # From http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/ns3034d.nsf
    tst = gdaltest.GDALTest('NITF', 'nitf/ns3034d.nsf', 1, 170)
    return tst.testOpen()
###############################################################################
# Checks a 1-bit RGB/LUT (green arrow) with a mask table (pad pixels having value of 0x00)
# and a transparent pixel value of 1 being mapped to green by the LUT
def test_nitf_17():
    # From http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/i_3034f.ntf
    tst = gdaltest.GDALTest('NITF', 'nitf/i_3034f.ntf', 1, 170)
    return tst.testOpen()
###############################################################################
# Test NITF file without image segment
def test_nitf_18():
    # Shut up the warning about missing image segment
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    # From http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv1_1/U_0006A.NTF
    ds = gdal.Open("data/nitf/U_0006A.NTF")
    gdal.PopErrorHandler()
    # The file opens, but exposes no raster bands.
    assert ds.RasterCount == 0
###############################################################################
# Test BILEVEL (C1) decompression
def test_nitf_19():
    # From http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_0/U_1050A.NTF
    tst = gdaltest.GDALTest('NITF', 'nitf/U_1050A.NTF', 1, 65024)
    return tst.testOpen()
###############################################################################
# Test NITF file consisting only of an header
def test_nitf_20():
    # Shut up the warning about file either corrupt or empty
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    # From http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv1_1/U_0002A.NTF
    ds = gdal.Open("data/nitf/U_0002A.NTF")
    gdal.PopErrorHandler()
    # A header-only file must fail to open.
    assert ds is None
###############################################################################
# Verify that TEXT access via the metadata domain works.
#
# See also nitf_35 for writing TEXT segments.
def test_nitf_21():
    """Read a TEXT segment through both GetMetadata() and GetMetadataItem()."""
    # Shut up the warning about missing image segment
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds = gdal.Open('data/nitf/ns3114a.nsf')
    gdal.PopErrorHandler()
    mdTEXT = ds.GetMetadata('TEXT')
    try:  # NG bindings
        data0 = ds.GetMetadataItem('DATA_0', 'TEXT')
    except Exception:
        # Old-generation bindings lack GetMetadataItem(); fall back to the
        # dict. (Was a bare `except:`, which also swallowed SystemExit etc.)
        data0 = mdTEXT['DATA_0']
    ds = None
    assert mdTEXT['DATA_0'] == 'A', 'did not find expected DATA_0 from metadata.'
    assert data0 == 'A', 'did not find expected DATA_0 from metadata item.'
###############################################################################
# Write/Read test of simple int32 reference data.
def test_nitf_22():
    tst = gdaltest.GDALTest('NITF', '../../gcore/data/int32.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Write/Read test of simple float32 reference data.
def test_nitf_23():
    tst = gdaltest.GDALTest('NITF', '../../gcore/data/float32.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Write/Read test of simple float64 reference data.
def test_nitf_24():
    tst = gdaltest.GDALTest('NITF', '../../gcore/data/float64.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Write/Read test of simple uint16 reference data.
def test_nitf_25():
    tst = gdaltest.GDALTest('NITF', '../../gcore/data/uint16.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Write/Read test of simple uint32 reference data.
def test_nitf_26():
    tst = gdaltest.GDALTest('NITF', '../../gcore/data/uint32.tif', 1, 4672)
    return tst.testCreateCopy()
###############################################################################
# Test Create() with IC=NC compression, and multi-blocks
def test_nitf_27():
    # Uncompressed output tiled into 10x10 blocks; same checksums as nitf_4/5.
    nitf_create(['ICORDS=G', 'IC=NC', 'BLOCKXSIZE=10', 'BLOCKYSIZE=10'])
    return nitf_check_created_file(32498, 42602, 38982)
###############################################################################
# Test Create() with IC=C8 compression with the JP2ECW driver
def test_nitf_28_jp2ecw():
    """Create/verify a JPEG2000 (IC=C8) NITF via JP2ECW, then check the
    block size the ECW driver reports on a fresh C8 file.

    Fixes a dead check left over from the pytest migration: nitf_create()/
    nitf_check_created_file() are assert-based and return None, so the old
    ``== 'success'`` comparisons were always false and the verification never
    ran.  Driver re-registration is now also exception-safe.
    """
    gdaltest.nitf_28_jp2ecw_is_ok = False
    if gdal.GetDriverByName('JP2ECW') is None:
        pytest.skip()
    import ecw
    if not ecw.has_write_support():
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but('JP2ECW')
    try:
        nitf_create(['ICORDS=G', 'IC=C8', 'TARGET=75'],
                    set_inverted_color_interp=False)
        nitf_check_created_file(32398, 42502, 38882,
                                set_inverted_color_interp=False)
        # Only mark OK for the read-back tests once verification passed.
        gdaltest.nitf_28_jp2ecw_is_ok = True
        tmpfilename = '/vsimem/nitf_28_jp2ecw.ntf'
        src_ds = gdal.GetDriverByName('MEM').Create('', 1025, 1025)
        gdal.GetDriverByName('NITF').CreateCopy(tmpfilename, src_ds, options=['IC=C8'])
        ds = gdal.Open(tmpfilename)
        blockxsize, blockysize = ds.GetRasterBand(1).GetBlockSize()
        ds = None
        gdal.Unlink(tmpfilename)
        # 256 since this is hardcoded as such in the ECW driver
        assert (blockxsize, blockysize) == (256, 256), 'wrong block size'
    finally:
        gdaltest.reregister_all_jpeg2000_drivers()
###############################################################################
# Test reading the previously create file with the JP2MrSID driver
def test_nitf_28_jp2mrsid():
    # Depends on test_nitf_28_jp2ecw having produced and verified the file.
    if not gdaltest.nitf_28_jp2ecw_is_ok:
        pytest.skip()
    jp2mrsid_drv = gdal.GetDriverByName('JP2MrSID')
    if jp2mrsid_drv is None:
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but('JP2MrSID')
    # NOTE(review): if this raises, re-registration below is skipped —
    # consider try/finally; left as-is here.
    ret = nitf_check_created_file(32398, 42502, 38882, set_inverted_color_interp=False)
    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
###############################################################################
# Test reading the previously create file with the JP2KAK driver
def test_nitf_28_jp2kak():
    if not gdaltest.nitf_28_jp2ecw_is_ok:
        pytest.skip()
    jp2kak_drv = gdal.GetDriverByName('JP2KAK')
    if jp2kak_drv is None:
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but('JP2KAK')
    ret = nitf_check_created_file(32398, 42502, 38882, set_inverted_color_interp=False)
    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
###############################################################################
# Test reading the previously create file with the JP2KAK driver
def test_nitf_28_jp2openjpeg():
    if not gdaltest.nitf_28_jp2ecw_is_ok:
        pytest.skip()
    drv = gdal.GetDriverByName('JP2OpenJPEG')
    if drv is None:
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but('JP2OpenJPEG')
    ret = nitf_check_created_file(32398, 42502, 38882, set_inverted_color_interp=False)
    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
###############################################################################
# Test Create() with IC=C8 compression with the JP2OpenJPEG driver
def test_nitf_28_jp2openjpeg_bis():
    """Create/verify a JPEG2000 (IC=C8) NITF via JP2OpenJPEG (CreateCopy
    path), then check the default block size on a fresh C8 file.

    Fixes the dead ``== 'success'`` comparison left over from the pytest
    migration (nitf_create() returns None, so the verification never ran) and
    makes driver re-registration exception-safe.
    """
    drv = gdal.GetDriverByName('JP2OpenJPEG')
    if drv is None:
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but('JP2OpenJPEG')
    try:
        nitf_create(['ICORDS=G', 'IC=C8', 'QUALITY=25'],
                    set_inverted_color_interp=False, createcopy=True)
        nitf_check_created_file(31604, 42782, 38791,
                                set_inverted_color_interp=False)
        tmpfilename = '/vsimem/nitf_28_jp2openjpeg_bis.ntf'
        src_ds = gdal.GetDriverByName('MEM').Create('', 1025, 1025)
        gdal.GetDriverByName('NITF').CreateCopy(tmpfilename, src_ds, options=['IC=C8'])
        ds = gdal.Open(tmpfilename)
        blockxsize, blockysize = ds.GetRasterBand(1).GetBlockSize()
        ds = None
        gdal.Unlink(tmpfilename)
        assert (blockxsize, blockysize) == (1024, 1024), 'wrong block size'
    finally:
        gdaltest.reregister_all_jpeg2000_drivers()
###############################################################################
# Test Create() with a LUT
def test_nitf_29():
    # Write a paletted (RGB/LUT) NITF, read the color table back, then
    # CreateCopy it and check the copy's table too.
    drv = gdal.GetDriverByName('NITF')
    ds = drv.Create('tmp/test_29.ntf', 1, 1, 1, gdal.GDT_Byte,
                    ['IREP=RGB/LUT', 'LUT_SIZE=128'])
    ct = gdal.ColorTable()
    ct.SetColorEntry(0, (255, 255, 255, 255))
    ct.SetColorEntry(1, (255, 255, 0, 255))
    ct.SetColorEntry(2, (255, 0, 255, 255))
    ct.SetColorEntry(3, (0, 255, 255, 255))
    ds.GetRasterBand(1).SetRasterColorTable(ct)
    ds = None
    ds = gdal.Open('tmp/test_29.ntf')
    ct = ds.GetRasterBand(1).GetRasterColorTable()
    # NOTE(review): 129 entries (LUT_SIZE=128 + 1) on read-back is the
    # driver's expected behavior here — presumably a transparent extra entry.
    assert (ct.GetCount() == 129 and \
       ct.GetColorEntry(0) == (255, 255, 255, 255) and \
       ct.GetColorEntry(1) == (255, 255, 0, 255) and \
       ct.GetColorEntry(2) == (255, 0, 255, 255) and \
       ct.GetColorEntry(3) == (0, 255, 255, 255)), 'Wrong color table entry.'
    new_ds = drv.CreateCopy('tmp/test_29_copy.ntf', ds)
    del new_ds
    ds = None
    ds = gdal.Open('tmp/test_29_copy.ntf')
    ct = ds.GetRasterBand(1).GetRasterColorTable()
    # The copy grows by one more entry (130) — expected by this test.
    assert (ct.GetCount() == 130 and \
       ct.GetColorEntry(0) == (255, 255, 255, 255) and \
       ct.GetColorEntry(1) == (255, 255, 0, 255) and \
       ct.GetColorEntry(2) == (255, 0, 255, 255) and \
       ct.GetColorEntry(3) == (0, 255, 255, 255)), 'Wrong color table entry.'
    ds = None
###############################################################################
# Verify we can write a file with BLOCKA TRE and read it back properly.
def _check_blocka_md(md, lat_deg):
    """Assert the BLOCKA metadata items in *md* have the expected values.

    lat_deg: integer latitude degrees used in the corner locations (41 for
    the values of the source file, 42 for the overridden ones).
    """
    expected = {
        'NITF_BLOCKA_BLOCK_INSTANCE_01': '01',
        'NITF_BLOCKA_BLOCK_COUNT': '01',
        'NITF_BLOCKA_N_GRAY_01': '00000',
        'NITF_BLOCKA_L_LINES_01': '01000',
        'NITF_BLOCKA_LAYOVER_ANGLE_01': '000',
        'NITF_BLOCKA_SHADOW_ANGLE_01': '000',
        'NITF_BLOCKA_FRLC_LOC_01': '+%d.319331+020.078400' % lat_deg,
        'NITF_BLOCKA_LRLC_LOC_01': '+%d.317083+020.126072' % lat_deg,
        'NITF_BLOCKA_LRFC_LOC_01': '+%d.281634+020.122570' % lat_deg,
        'NITF_BLOCKA_FRFC_LOC_01': '+%d.283881+020.074924' % lat_deg,
    }
    for key, value in expected.items():
        assert md.get(key) == value, \
            'BLOCKA metadata has unexpected value for %s.' % key


def test_nitf_30():
    """CreateCopy() must propagate the BLOCKA TRE; BLOCKA_* and TRE=BLOCKA=
    creation options must override it; gdal_translate -ullr and
    USE_SRC_NITF_METADATA=NO must drop it.

    (Also fixes the 'unexpectdly' typo in the assertion messages and factors
    the previously triplicated one-line BLOCKA check into _check_blocka_md.)
    """
    src_ds = gdal.Open('data/nitf/fake_nsif.ntf')
    ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf30.ntf', src_ds)
    chksum = ds.GetRasterBand(1).Checksum()
    chksum_expect = 12033
    assert chksum == chksum_expect, 'Did not get expected chksum for band 1'
    md = ds.GetMetadata()
    assert md['NITF_FHDR'] == 'NSIF01.00', 'Got wrong FHDR value'
    _check_blocka_md(md, 41)
    ds = None
    gdal.GetDriverByName('NITF').Delete('tmp/nitf30.ntf')
    # Test overriding src BLOCKA metadata with NITF_BLOCKA creation options
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf30_override.ntf', src_ds,
                                            options=['BLOCKA_BLOCK_INSTANCE_01=01',
                                                     'BLOCKA_BLOCK_COUNT=01',
                                                     'BLOCKA_N_GRAY_01=00000',
                                                     'BLOCKA_L_LINES_01=01000',
                                                     'BLOCKA_LAYOVER_ANGLE_01=000',
                                                     'BLOCKA_SHADOW_ANGLE_01=000',
                                                     'BLOCKA_FRLC_LOC_01=+42.319331+020.078400',
                                                     'BLOCKA_LRLC_LOC_01=+42.317083+020.126072',
                                                     'BLOCKA_LRFC_LOC_01=+42.281634+020.122570',
                                                     'BLOCKA_FRFC_LOC_01=+42.283881+020.074924'
                                                     ])
    ds = gdal.Open('/vsimem/nitf30_override.ntf')
    md = ds.GetMetadata()
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf30_override.ntf')
    _check_blocka_md(md, 42)
    # Test overriding src BLOCKA metadata with TRE=BLOCKA= creation option
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf30_override.ntf', src_ds,
                                            options=['TRE=BLOCKA=010000001000000000 +42.319331+020.078400+42.317083+020.126072+42.281634+020.122570+42.283881+020.074924xxxxx'
                                                     ])
    ds = gdal.Open('/vsimem/nitf30_override.ntf')
    md = ds.GetMetadata()
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf30_override.ntf')
    _check_blocka_md(md, 42)
    # Test that gdal_translate -ullr doesn't propagate BLOCKA
    gdal.Translate('/vsimem/nitf30_no_src_md.ntf', src_ds, format='NITF', outputBounds=[2, 49, 3, 50])
    ds = gdal.Open('/vsimem/nitf30_no_src_md.ntf')
    md = ds.GetMetadata()
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf30_no_src_md.ntf')
    assert 'NITF_BLOCKA_BLOCK_INSTANCE_01' not in md, \
        'unexpectedly found BLOCKA metadata.'
    # Test USE_SRC_NITF_METADATA=NO
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf30_no_src_md.ntf', src_ds,
                                            options=['USE_SRC_NITF_METADATA=NO'])
    ds = gdal.Open('/vsimem/nitf30_no_src_md.ntf')
    md = ds.GetMetadata()
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf30_no_src_md.ntf')
    assert 'NITF_BLOCKA_BLOCK_INSTANCE_01' not in md, \
        'unexpectedly found BLOCKA metadata.'
###############################################################################
# Verify we can write a file with a custom TRE and read it back properly.
def test_nitf_31():
    """Write two custom TREs and check they round-trip, including the leading
    space of the CUSTOM TRE content (#3088, #3204)."""
    nitf_create(['TRE=CUSTOM= Test TRE1\\0MORE',
                 'TRE=TOTEST=SecondTRE',
                 'ICORDS=G'])
    ds = gdal.Open('tmp/test_create.ntf')
    md = ds.GetMetadata('TRE')
    assert len(md) == 2, 'Did not get expected TRE count'
    # Check that the leading space in the CUSTOM metadata item is preserved (#3088, #3204)
    # (the check was previously wrapped in a bare try/except that silently
    # swallowed the AssertionError, making it a no-op)
    assert ds.GetMetadataItem('CUSTOM', 'TRE') == ' Test TRE1\\0MORE', \
        'Did not get expected TRE contents'
    assert md['CUSTOM'] == ' Test TRE1\\0MORE' and md['TOTEST'] == 'SecondTRE', \
        'Did not get expected TRE contents'
    ds = None
    return nitf_check_created_file(32498, 42602, 38982)
###############################################################################
# Test Create() with ICORDS=D
def test_nitf_32():
    """Create with decimal-degree (ICORDS=D) coordinates and validate."""
    creation_options = ['ICORDS=D']
    nitf_create(creation_options)
    checksums = (32498, 42602, 38982)
    return nitf_check_created_file(*checksums)
###############################################################################
# Test Create() with ICORDS=D and a consistent BLOCKA
def test_nitf_33():
    """Create with ICORDS=D plus a BLOCKA TRE whose corner locations are
    consistent with the georeferencing, then validate the file."""
    blocka_options = [
        'BLOCKA_BLOCK_COUNT=01',
        'BLOCKA_BLOCK_INSTANCE_01=01',
        'BLOCKA_L_LINES_01=100',
        'BLOCKA_FRLC_LOC_01=+29.950000+119.950000',
        'BLOCKA_LRLC_LOC_01=+20.050000+119.950000',
        'BLOCKA_LRFC_LOC_01=+20.050000+100.050000',
        'BLOCKA_FRFC_LOC_01=+29.950000+100.050000',
    ]
    nitf_create(['ICORDS=D'] + blocka_options)
    return nitf_check_created_file(32498, 42602, 38982)
###############################################################################
# Test CreateCopy() of a 16bit image with tiling
def test_nitf_34():
    """CreateCopy() a 16-bit DTED source with 64x64 blocks."""
    checker = gdaltest.GDALTest('NITF', 'n43.dt0', 1, 49187, options=['BLOCKSIZE=64'])
    return checker.testCreateCopy()
###############################################################################
# Test CreateCopy() writing file with a text segment.
def test_nitf_35():
    """CreateCopy() must carry both entries of the source TEXT metadata
    domain into two text segments."""
    src_ds = gdal.Open('data/nitf/text_md.vrt')
    ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf_35.ntf', src_ds)
    src_ds = None
    ds = None
    ds = gdal.Open('tmp/nitf_35.ntf')
    text_md = ds.GetMetadata('TEXT')
    # First segment: multi-line content.
    assert text_md['DATA_0'] == 'This is text data\nwith a newline.', \
        'Did not get expected TEXT metadata.'
    # Second segment: single-line content.
    assert text_md['DATA_1'] == 'Also, a second text segment is created.', \
        'Did not get expected TEXT metadata.'
    ds = None
    gdal.GetDriverByName('NITF').Delete('tmp/nitf_35.ntf')
###############################################################################
# Create and read a JPEG encoded NITF file (C3) with several blocks
# Check that statistics are persisted (#3985)
def test_nitf_36():
    """Create a multi-block JPEG (IC=C3) NITF; statistics must be absent
    until computed, then persist across a reopen (#3985)."""
    src_ds = gdal.Open('data/rgbsmall.tif')
    ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf36.ntf', src_ds,
                                                 options=['IC=C3', 'BLOCKSIZE=32', 'QUALITY=100'])
    src_ds = None
    ds = None
    ds = gdal.Open('tmp/nitf36.ntf')
    assert ds.GetRasterBand(1).GetMinimum() is None, \
        'Did not expect to have minimum value at that point.'
    # With force=False and no stored stats, negative placeholder values are
    # expected (hence the stddev < 0 check below).
    (_, _, mean, stddev) = ds.GetRasterBand(1).GetStatistics(False, False)
    assert stddev < 0, 'Did not expect to have statistics at that point.'
    (exp_mean, exp_stddev) = (65.4208, 47.254550335)
    # Second argument True forces computation of the statistics.
    (_, _, mean, stddev) = ds.GetRasterBand(1).GetStatistics(False, True)
    assert exp_mean == pytest.approx(mean, abs=0.1) and exp_stddev == pytest.approx(stddev, abs=0.1), \
        'did not get expected mean or standard dev.'
    md = ds.GetMetadata('IMAGE_STRUCTURE')
    assert md['COMPRESSION'] == 'JPEG', 'Did not get expected compression value.'
    ds = None
    # Check that statistics are persisted (#3985)
    ds = gdal.Open('tmp/nitf36.ntf')
    assert ds.GetRasterBand(1).GetMinimum() is not None, \
        'Should have minimum value at that point.'
    # Even without forcing, the previously computed stats must now be there.
    (_, _, mean, stddev) = ds.GetRasterBand(1).GetStatistics(False, False)
    assert exp_mean == pytest.approx(mean, abs=0.1) and exp_stddev == pytest.approx(stddev, abs=0.1), \
        'Should have statistics at that point.'
    ds = None
###############################################################################
# Create and read a NITF file with 69999 bands
def test_nitf_37():
    """The NITF driver must round-trip the maximum band count of 69999."""
    band_count = 69999
    created = gdal.GetDriverByName('NITF').Create('tmp/nitf37.ntf', 1, 1, band_count)
    created = None
    reopened = gdal.Open('tmp/nitf37.ntf')
    assert reopened.RasterCount == band_count
    reopened = None
###############################################################################
# Create and read a NITF file with 999 images
def test_nitf_38():
    """Create a NITF with 999 image segments (NUMI=999), write only to the
    last one, build an overview on it, and exercise the NITF_IM:<n>:
    subdataset syntax through VRT copies with relative/absolute paths."""
    ds = gdal.Open('data/byte.tif')
    nXSize = ds.RasterXSize
    nYSize = ds.RasterYSize
    data = ds.GetRasterBand(1).ReadRaster(0, 0, nXSize, nYSize)
    expected_cs = ds.GetRasterBand(1).Checksum()
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf38.ntf', nXSize, nYSize, 1, options=['NUMI=999'])
    ds = None
    # Write imagery into the 999th image segment only (0-based index 998).
    ds = gdal.Open('NITF_IM:998:tmp/nitf38.ntf', gdal.GA_Update)
    ds.GetRasterBand(1).WriteRaster(0, 0, nXSize, nYSize, data)
    # Create overviews
    ds.BuildOverviews(overviewlist=[2])
    ds = None
    # First image segment was never written to: all-zero checksum expected.
    ds = gdal.Open('NITF_IM:0:tmp/nitf38.ntf')
    assert ds.GetRasterBand(1).Checksum() == 0
    ds = None
    ds = gdal.Open('NITF_IM:998:tmp/nitf38.ntf')
    cs = ds.GetRasterBand(1).Checksum()
    assert cs == expected_cs, 'bad checksum for image of 998th subdataset'
    # Check the overview
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 1087, 'bad checksum for overview of image of 998th subdataset'
    # Variant 1: relative source path, relative VRT path.
    out_ds = gdal.GetDriverByName('VRT').CreateCopy('tmp/nitf38.vrt', ds)
    out_ds = None
    ds = None
    ds = gdal.Open('tmp/nitf38.vrt')
    cs = ds.GetRasterBand(1).Checksum()
    ds = None
    gdal.Unlink('tmp/nitf38.vrt')
    assert cs == expected_cs
    # Variant 2: absolute source path, absolute VRT path.
    ds = gdal.Open('NITF_IM:998:%s/tmp/nitf38.ntf' % os.getcwd())
    out_ds = gdal.GetDriverByName('VRT').CreateCopy('%s/tmp/nitf38.vrt' % os.getcwd(), ds)
    out_ds = None
    ds = None
    ds = gdal.Open('tmp/nitf38.vrt')
    cs = ds.GetRasterBand(1).Checksum()
    ds = None
    gdal.Unlink('tmp/nitf38.vrt')
    assert cs == expected_cs
    # Variant 3: absolute source path, relative VRT path.
    ds = gdal.Open('NITF_IM:998:%s/tmp/nitf38.ntf' % os.getcwd())
    out_ds = gdal.GetDriverByName('VRT').CreateCopy('tmp/nitf38.vrt', ds)
    del out_ds
    ds = None
    ds = gdal.Open('tmp/nitf38.vrt')
    cs = ds.GetRasterBand(1).Checksum()
    ds = None
    gdal.Unlink('tmp/nitf38.vrt')
    assert cs == expected_cs
###############################################################################
# Create and read a JPEG encoded NITF file (M3) with several blocks
def test_nitf_39():
    """Create and read back a masked JPEG (IC=M3) NITF with several blocks."""
    src_ds = gdal.Open('data/rgbsmall.tif')
    ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf39.ntf', src_ds,
                                                 options=['IC=M3', 'BLOCKSIZE=32', 'QUALITY=100'])
    src_ds = None
    ds = None
    ds = gdal.Open('tmp/nitf39.ntf')
    expected = (65.4208, 47.254550335)
    got = ds.GetRasterBand(1).ComputeBandStats()
    assert got[0] == pytest.approx(expected[0], abs=0.1) and got[1] == pytest.approx(expected[1], abs=0.1), \
        'did not get expected mean or standard dev.'
    structure_md = ds.GetMetadata('IMAGE_STRUCTURE')
    assert structure_md['COMPRESSION'] == 'JPEG', 'Did not get expected compression value.'
    ds = None
###############################################################################
# Create a 10 GB NITF file
def test_nitf_40():
    """Write one non-zero byte at the far corner of a 99000x99000 image and
    check it lands at the expected ~10 GB file offset (guards against an
    involuntary 32-bit truncation of block offsets)."""
    # Determine if the filesystem supports sparse files (we don't want to create a real 10 GB
    # file !
    if not gdaltest.filesystem_supports_sparse_files('tmp'):
        pytest.skip()
    width = 99000
    height = 99000
    x = width - 1
    y = height - 1
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf40.ntf', width, height, options=['BLOCKSIZE=256'])
    data = struct.pack('B' * 1, 123)
    # Write a non NULL byte at the bottom right corner of the image (around 10 GB offset)
    ds.GetRasterBand(1).WriteRaster(x, y, 1, 1, data)
    ds = None
    # Check that we can fetch it at the right value
    ds = gdal.Open('tmp/nitf40.ntf')
    assert ds.GetRasterBand(1).ReadRaster(x, y, 1, 1) == data
    ds = None
    # Check that it is indeed at a very far offset, and that the NITF driver
    # has not put it somewhere else due to involuntary cast to 32bit integer.
    blockWidth = 256
    blockHeight = 256
    nBlockx = (width + blockWidth - 1) // blockWidth
    iBlockx = x // blockWidth
    iBlocky = y // blockHeight
    ix = x % blockWidth
    iy = y % blockHeight
    # 843 is the offset of the first byte of image data in the created file.
    offset = 843 + (iBlocky * nBlockx + iBlockx) * blockWidth * blockHeight + (iy * blockWidth + ix)
    # os.SEEK_SET has existed since Python 2.5, so the old fallback definition
    # was dead code; 'with' also guarantees the handle is closed.
    with open('tmp/nitf40.ntf', 'rb') as fd:
        fd.seek(offset, os.SEEK_SET)
        bytes_read = fd.read(1)
    val = struct.unpack('B' * 1, bytes_read)[0]
    assert val == 123, ('Bad value at offset %d : %d' % (offset, val))
###############################################################################
# Check reading a 12-bit JPEG compressed NITF
def test_nitf_41(not_jpeg_9b):
    """Read a 12-bit JPEG NITF; requires a JPEG driver built with 12-bit
    support (advertised through UInt16 in its creation data types)."""
    jpeg_md = gdal.GetDriverByName('JPEG').GetMetadata()
    if 'UInt16' not in jpeg_md[gdal.DMD_CREATIONDATATYPES]:
        pytest.skip('12bit jpeg not available')
    # Drop any cached statistics so they are recomputed below.
    gdal.Unlink('data/nitf/U_4017A.NTF.aux.xml')
    ds = gdal.Open('data/nitf/U_4017A.NTF')
    assert ds.GetRasterBand(1).DataType == gdal.GDT_UInt16
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    assert 2385 <= stats[2] <= 2386
    ds = None
    gdal.Unlink('data/nitf/U_4017A.NTF.aux.xml')
###############################################################################
# Check creating a 12-bit JPEG compressed NITF
def test_nitf_42(not_jpeg_9b):
    """Create a 12-bit JPEG NITF (IC=C3) and read it back; requires a JPEG
    driver built with 12-bit support."""
    jpeg_md = gdal.GetDriverByName('JPEG').GetMetadata()
    if 'UInt16' not in jpeg_md[gdal.DMD_CREATIONDATATYPES]:
        pytest.skip('12bit jpeg not available')
    src = gdal.Open('data/nitf/U_4017A.NTF')
    out_ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf42.ntf', src, options=['IC=C3', 'FHDR=NITF02.10'])
    del out_ds
    ds = gdal.Open('tmp/nitf42.ntf')
    assert ds.GetRasterBand(1).DataType == gdal.GDT_UInt16
    stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    assert 2385 <= stats[2] <= 2386
    ds = None
###############################################################################
# Test CreateCopy() in IC=C8 with various JPEG2000 drivers
def nitf_43(driver_to_test, options):
    """Helper: CreateCopy() to IC=C8 (JPEG2000) using *driver_to_test*, check
    the checksum and that no GMLJP2 box was embedded; returns 'success' or
    'fail' (old-style result checked by the callers)."""
    jp2_drv = gdal.GetDriverByName(driver_to_test)
    if driver_to_test == 'JP2ECW' and jp2_drv is not None:
        # ECW SDKs without write support do not advertise creation options.
        if 'DMD_CREATIONOPTIONLIST' not in jp2_drv.GetMetadata():
            jp2_drv = None
    if jp2_drv is None:
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but(driver_to_test)
    ds = gdal.Open('data/byte.tif')
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    out_ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf_43.ntf', ds, options=options, strict=0)
    gdal.PopErrorHandler()
    out_ds = None
    out_ds = gdal.Open('tmp/nitf_43.ntf')
    if out_ds.GetRasterBand(1).Checksum() == 4672:
        ret = 'success'
    else:
        ret = 'fail'
    out_ds = None
    # No GMLJP2 box must be written in the NITF-embedded codestream.
    # (use a context manager: the original leaked the file handle)
    with open('tmp/nitf_43.ntf', 'rb') as f:
        content = f.read().decode('LATIN1')
    if content.find('<gml') >= 0:
        print('GMLJP2 detected !')
        ret = 'fail'
    gdal.GetDriverByName('NITF').Delete('tmp/nitf_43.ntf')
    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
def test_nitf_43_jasper():
    # JasPer-based generic JPEG2000 driver variant of nitf_43().
    return nitf_43(driver_to_test='JPEG2000', options=['IC=C8'])
def test_nitf_43_jp2ecw():
    # ECW SDK based JPEG2000 driver; some SDK builds are read-only, so skip
    # unless write support is available.
    import ecw
    if not ecw.has_write_support():
        pytest.skip()
    return nitf_43('JP2ECW', ['IC=C8', 'TARGET=0'])
def test_nitf_43_jp2kak():
    # Kakadu-based JPEG2000 driver variant of nitf_43().
    return nitf_43(driver_to_test='JP2KAK', options=['IC=C8', 'QUALITY=100'])
###############################################################################
# Check creating a monoblock 10000x1 image (ticket #3263)
def test_nitf_44():
    """A 10000x1 image must be written as a single block (ticket #3263)."""
    out_ds = gdal.GetDriverByName('NITF').Create('tmp/nitf44.ntf', 10000, 1)
    out_ds.GetRasterBand(1).Fill(255)
    out_ds = None
    ds = gdal.Open('tmp/nitf44.ntf')
    band = ds.GetRasterBand(1)
    # GetBlockSize() is not exposed by all bindings versions.
    if 'GetBlockSize' in dir(gdal.Band):
        assert band.GetBlockSize()[0] == 10000
    assert band.Checksum() == 57182
    ds = None
###############################################################################
# Check overviews on a JPEG compressed subdataset
def test_nitf_45():
    """Build and read back an overview on the second (JPEG) image of a
    two-image file addressed via the NITF_IM: subdataset syntax."""
    try:
        os.remove('tmp/nitf45.ntf.aux.xml')
    except OSError:
        pass
    shutil.copyfile('data/nitf/two_images_jpeg.ntf', 'tmp/nitf45.ntf')
    ds = gdal.Open('NITF_IM:1:tmp/nitf45.ntf', gdal.GA_Update)
    ds.BuildOverviews(overviewlist=[2])
    # FIXME ? ds.GetRasterBand(1).GetOverview(0) is None until we reopen
    ds = None
    ds = gdal.Open('NITF_IM:1:tmp/nitf45.ntf')
    overview = ds.GetRasterBand(1).GetOverview(0)
    assert overview.Checksum() == 1086, 'did not get expected checksum for overview of subdataset'
    ds = None
###############################################################################
# Check overviews on a JPEG2000 compressed subdataset
def nitf_46(driver_to_test):
    """Helper: build and check an overview on the second (JPEG2000) image of
    a two-image file using *driver_to_test*; returns 'success' or 'fail'
    (old-style result, checked by the callers)."""
    jp2_drv = gdal.GetDriverByName(driver_to_test)
    if jp2_drv is None:
        pytest.skip()
    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but(driver_to_test)
    # Clean any leftovers from previous runs.
    try:
        os.remove('tmp/nitf46.ntf.aux.xml')
    except OSError:
        pass
    try:
        os.remove('tmp/nitf46.ntf_0.ovr')
    except OSError:
        pass
    shutil.copyfile('data/nitf/two_images_jp2.ntf', 'tmp/nitf46.ntf')
    ds = gdal.Open('NITF_IM:1:tmp/nitf46.ntf', gdal.GA_Update)
    ds.BuildOverviews(overviewlist=[2])
    # FIXME ? ds.GetRasterBand(1).GetOverview(0) is None until we reopen
    ds = None
    ds = gdal.Open('NITF_IM:1:tmp/nitf46.ntf')
    if ds.GetRasterBand(1).GetOverview(0) is None:
        gdaltest.post_reason('no overview of subdataset')
        ret = 'fail'
    else:
        cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        if cs != 1086:
            print(cs)
            gdaltest.post_reason('did not get expected checksum for overview of subdataset')
            ret = 'fail'
        else:
            ret = 'success'
    ds = None
    # Restore all JPEG2000 drivers before returning.
    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
def nitf_46_jp2ecw():
    # NOTE(review): no test_ prefix, so pytest does not collect this variant.
    return nitf_46('JP2ECW')
def nitf_46_jp2mrsid():
    # NOTE(review): no test_ prefix, so pytest does not collect this variant.
    return nitf_46('JP2MrSID')
def nitf_46_jp2kak():
    # NOTE(review): no test_ prefix, so pytest does not collect this variant.
    return nitf_46('JP2KAK')
def test_nitf_46_jasper():
    # JasPer-based generic JPEG2000 driver variant (collected by pytest).
    return nitf_46('JPEG2000')
def nitf_46_openjpeg():
    # NOTE(review): no test_ prefix, so pytest does not collect this variant.
    return nitf_46('JP2OpenJPEG')
###############################################################################
# Check reading of rsets.
def test_nitf_47():
    """R-set companion files (.r1, .r2) must appear as overviews of the .r0."""
    ds = gdal.Open('data/nitf/rset.ntf.r0')
    band = ds.GetRasterBand(2)
    assert band.GetOverviewCount() == 2, \
        'did not get the expected number of rset overviews.'
    assert band.GetOverview(1).Checksum() == 1297, \
        'did not get expected checksum for overview of subdataset'
    ds = None
###############################################################################
# Check building of standard overviews in place of rset overviews.
def _nitf_48_cleanup():
    """Remove the copied rset files; each removal is attempted independently
    so that one missing file no longer prevents removal of the others (the
    original single try block aborted at the first OSError)."""
    for filename in ('tmp/rset.ntf.r0', 'tmp/rset.ntf.r1', 'tmp/rset.ntf.r2',
                     'tmp/rset.ntf.r0.ovr'):
        try:
            os.remove(filename)
        except OSError:
            pass


def test_nitf_48():
    """BuildOverviews() on a .r0 that has rsets must create a regular .ovr
    that takes precedence over the rset overviews."""
    _nitf_48_cleanup()
    for ext in ('r0', 'r1', 'r2'):
        shutil.copyfile('data/nitf/rset.ntf.' + ext, 'tmp/rset.ntf.' + ext)
    ds = gdal.Open('tmp/rset.ntf.r0', gdal.GA_Update)
    ds.BuildOverviews(overviewlist=[3])
    ds = None
    ds = gdal.Open('tmp/rset.ntf.r0')
    # The .ovr overview replaces the two rset levels.
    assert ds.GetRasterBand(1).GetOverviewCount() == 1, \
        'did not get the expected number of rset overviews.'
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 2328, 'did not get expected checksum for overview of subdataset'
    ds = None
    _nitf_48_cleanup()
###############################################################################
# Test TEXT and CGM creation options with CreateCopy() (#3376)
def test_nitf_49():
    """TEXT=/CGM= creation options must override the source metadata domains,
    and a further CreateCopy() must propagate them (#3376)."""
    options = ["TEXT=DATA_0=COUCOU",
               "TEXT=HEADER_0=ABC",  # This content is invalid but who cares here
               "CGM=SEGMENT_COUNT=1",
               "CGM=SEGMENT_0_SLOC_ROW=25",
               "CGM=SEGMENT_0_SLOC_COL=25",
               "CGM=SEGMENT_0_SDLVL=2",
               "CGM=SEGMENT_0_SALVL=1",
               "CGM=SEGMENT_0_DATA=XYZ"]
    src_ds = gdal.Open('data/nitf/text_md.vrt')
    # This will check that the creation option overrides the TEXT metadata domain from the source
    ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf49.ntf', src_ds,
                                                 options=options)
    # Test copy from source TEXT and CGM metadata domains
    ds2 = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf49_2.ntf', ds)
    md = ds2.GetMetadata('TEXT')
    # The previous version only called gdaltest.post_reason() and returned on
    # mismatch, so the test could never actually fail: use pytest.fail.
    if 'DATA_0' not in md or md['DATA_0'] != 'COUCOU' or \
       'HEADER_0' not in md or md['HEADER_0'].find('ABC ') == -1:
        print(md)
        pytest.fail('did not get expected TEXT metadata')
    md = ds2.GetMetadata('CGM')
    if 'SEGMENT_COUNT' not in md or md['SEGMENT_COUNT'] != '1' or \
       'SEGMENT_0_DATA' not in md or md['SEGMENT_0_DATA'] != 'XYZ':
        print(md)
        pytest.fail('did not get expected CGM metadata')
    src_ds = None
    ds = None
    ds2 = None
###############################################################################
# Test TEXT and CGM creation options with Create() (#3376)
def test_nitf_50():
    """Same as test_nitf_49 but through Create(): TEXT and CGM segments are
    only written when the dataset is closed, hence the reopen (#3376)."""
    options = [  # "IC=C8",
        "TEXT=DATA_0=COUCOU",
        "TEXT=HEADER_0=ABC",  # This content is invalid but who cares here
        "CGM=SEGMENT_COUNT=1",
        "CGM=SEGMENT_0_SLOC_ROW=25",
        "CGM=SEGMENT_0_SLOC_COL=25",
        "CGM=SEGMENT_0_SDLVL=2",
        "CGM=SEGMENT_0_SALVL=1",
        "CGM=SEGMENT_0_DATA=XYZ"]
    try:
        os.remove('tmp/nitf50.ntf')
    except OSError:
        pass
    # This will check that the creation option overrides the TEXT metadata domain from the source
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf50.ntf', 100, 100, 3, options=options)
    ds.WriteRaster(0, 0, 100, 100, ' ', 1, 1,
                   buf_type=gdal.GDT_Byte,
                   band_list=[1, 2, 3])
    ds.GetRasterBand(1).SetRasterColorInterpretation(gdal.GCI_BlueBand)
    ds.GetRasterBand(2).SetRasterColorInterpretation(gdal.GCI_GreenBand)
    ds.GetRasterBand(3).SetRasterColorInterpretation(gdal.GCI_RedBand)
    # We need to reopen the dataset, because the TEXT and CGM segments are only written
    # when closing the dataset (for JP2 compressed datastreams, we need to wait for the
    # imagery to be written)
    ds = None
    ds = gdal.Open('tmp/nitf50.ntf')
    md = ds.GetMetadata('TEXT')
    # The previous version only called gdaltest.post_reason() and returned on
    # mismatch, so the test could never actually fail: use pytest.fail.
    if 'DATA_0' not in md or md['DATA_0'] != 'COUCOU' or \
       'HEADER_0' not in md or md['HEADER_0'].find('ABC ') == -1:
        print(md)
        pytest.fail('did not get expected TEXT metadata')
    md = ds.GetMetadata('CGM')
    if 'SEGMENT_COUNT' not in md or md['SEGMENT_COUNT'] != '1' or \
       'SEGMENT_0_DATA' not in md or md['SEGMENT_0_DATA'] != 'XYZ':
        print(md)
        pytest.fail('did not get expected CGM metadata')
    ds = None
###############################################################################
# Test reading very small images with NBPP < 8 or NBPP == 12
def test_nitf_51():
    """Patch the NBPP (bits per pixel) header field by hand and verify that
    all-ones packed pixel data decodes to (1 << nbpp) - 1 for widths 1..8."""
    for xsize in range(1, 9):
        for nbpp in [1, 2, 3, 4, 5, 6, 7, 12]:
            ds = gdal.GetDriverByName('NITF').Create('tmp/nitf51.ntf', xsize, 1)
            ds = None
            f = open('tmp/nitf51.ntf', 'rb+')
            # Patch NBPP value at offset 811
            # (two ASCII digits: 48 is ord('0'))
            f.seek(811)
            f.write(struct.pack('B' * 2, 48 + int(nbpp / 10), 48 + nbpp % 10))
            # Write image data
            f.seek(843)
            # Number of bytes needed to hold xsize packed nbpp-bit samples.
            n = int((xsize * nbpp + 7) / 8)
            for i in range(n):
                f.write(struct.pack('B' * 1, 255))
            f.close()
            ds = gdal.Open('tmp/nitf51.ntf')
            if nbpp == 12:
                # 12-bit samples are promoted to UInt16.
                data = ds.GetRasterBand(1).ReadRaster(0, 0, xsize, 1, buf_type=gdal.GDT_UInt16)
                arr = struct.unpack('H' * xsize, data)
            else:
                data = ds.GetRasterBand(1).ReadRaster(0, 0, xsize, 1)
                arr = struct.unpack('B' * xsize, data)
            ds = None
            # Every sample must be the all-ones value for that bit depth.
            for i in range(xsize):
                if arr[i] != (1 << nbpp) - 1:
                    print('xsize = %d, nbpp = %d' % (xsize, nbpp))
                    pytest.fail('did not get expected data')
###############################################################################
# Test reading GeoSDE TREs
def test_nitf_52():
    """GEOPSB/PRJPSB file TREs plus a MAPLOB image TRE must produce an Albers
    Conic Equal Area SRS on the European 1950 datum and the encoded
    geotransform."""
    # Create a fake NITF file with GeoSDE TREs (probably not conformant, but enough to test GDAL code)
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf52.ntf', 1, 1, options=['FILE_TRE=GEOPSB=01234567890123456789012345678901234567890123456789012345678901234567890123456789012345EURM ',
                                                                              'FILE_TRE=PRJPSB=01234567890123456789012345678901234567890123456789012345678901234567890123456789AC0000000000000000000000000000000',
                                                                              'TRE=MAPLOB=M 0001000010000000000100000000000005000000'])
    ds = None
    ds = gdal.Open('tmp/nitf52.ntf')
    wkt = ds.GetProjectionRef()
    gt = ds.GetGeoTransform()
    ds = None
    # Two spellings of the datum name are accepted (presumably depending on
    # the PROJ/GDAL version -- TODO confirm).
    expected_wkt ="""PROJCS["unnamed",GEOGCS["EUROPEAN 1950, Mean (3 Param)",DATUM["EUROPEAN_1950_Mean_3_Param",SPHEROID["International 1924",6378388,297],TOWGS84[-87,-98,-121,0,0,0,0]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]]],PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["latitude_of_center",0],PARAMETER["longitude_of_center",0],PARAMETER["standard_parallel_1",0],PARAMETER["standard_parallel_2",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]"""
    assert wkt in (expected_wkt, expected_wkt.replace('EUROPEAN_1950_Mean_3_Param', 'EUROPEAN 1950, Mean (3 Param)'))
    assert gt == (100000.0, 10.0, 0.0, 5000000.0, 0.0, -10.0), \
        'did not get expected geotransform'
###############################################################################
# Test reading UTM MGRS
def test_nitf_53():
    """Patch ICORDS to 'U' (UTM/MGRS) with hand-written 15-character IGEOLO
    corner strings and check the derived UTM SRS and geotransform."""
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf53.ntf', 2, 2, options=['ICORDS=N'])
    ds = None
    f = open('tmp/nitf53.ntf', 'rb+')
    # Patch ICORDS and IGEOLO
    # (ICORDS is the single byte at offset 775; the four corner strings
    # follow immediately after it)
    f.seek(775)
    f.write(b'U')
    f.write(b'31UBQ1000040000')
    f.write(b'31UBQ2000040000')
    f.write(b'31UBQ2000030000')
    f.write(b'31UBQ1000030000')
    f.close()
    ds = gdal.Open('tmp/nitf53.ntf')
    wkt = ds.GetProjectionRef()
    gt = ds.GetGeoTransform()
    ds = None
    # Zone 31 => central meridian 3 in the Transverse Mercator parameters.
    assert 'PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]' in wkt, \
        'did not get expected SRS'
    assert gt == (205000.0, 10000.0, 0.0, 5445000.0, 0.0, -10000.0), \
        'did not get expected geotransform'
###############################################################################
# Test reading RPC00B
def test_nitf_54():
    """A (synthetic) RPC00B TRE must be exposed through the RPC metadata
    domain."""
    # Create a fake NITF file with RPC00B TRE (probably not conformant, but enough to test GDAL code)
    RPC00B = '100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf54.ntf', 1, 1, options=['TRE=RPC00B=' + RPC00B])
    ds = None
    ds = gdal.Open('tmp/nitf54.ntf')
    md = ds.GetMetadata('RPC')
    ds = None
    # HEIGHT_OFF is one of the standard RPC domain items GDAL derives.
    assert md is not None and 'HEIGHT_OFF' in md
###############################################################################
# Test reading ICHIPB
def test_nitf_55():
    """A (synthetic) ICHIPB TRE must surface ICHIP_* items in the default
    metadata domain."""
    # Create a fake NITF file with ICHIPB TRE (probably not conformant, but enough to test GDAL code)
    ICHIPB = '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf55.ntf', 1, 1, options=['TRE=ICHIPB=' + ICHIPB])
    ds = None
    ds = gdal.Open('tmp/nitf55.ntf')
    md = ds.GetMetadata()
    ds = None
    assert md is not None and 'ICHIP_SCALE_FACTOR' in md
###############################################################################
# Test reading USE00A
def test_nitf_56():
    """A (synthetic) USE00A TRE must surface NITF_USE00A_* items in the
    default metadata domain."""
    # Create a fake NITF file with USE00A TRE (probably not conformant, but enough to test GDAL code)
    USE00A = '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf56.ntf', 1, 1, options=['TRE=USE00A=' + USE00A])
    ds = None
    ds = gdal.Open('tmp/nitf56.ntf')
    md = ds.GetMetadata()
    ds = None
    assert md is not None and 'NITF_USE00A_ANGLE_TO_NORTH' in md
###############################################################################
# Test reading GEOLOB
def test_nitf_57():
    """A GEOLOB TRE must drive the geotransform of an otherwise
    ungeoreferenced image."""
    # Create a fake NITF file with GEOLOB TRE
    GEOLOB = '000000360000000360-180.000000000090.000000000000'
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf57.ntf', 1, 1, options=['TRE=GEOLOB=' + GEOLOB])
    ds = None
    ds = gdal.Open('tmp/nitf57.ntf')
    gt = ds.GetGeoTransform()
    ds = None
    # The previous version only called gdaltest.post_reason() and returned on
    # mismatch, so the test could never actually fail: use a real assertion.
    assert gt == (-180.0, 1.0, 0.0, 90.0, 0.0, -1.0), \
        'did not get expected geotransform'
###############################################################################
# Test reading STDIDC
def test_nitf_58():
    """A (synthetic) STDIDC TRE must surface NITF_STDIDC_* items in the
    default metadata domain."""
    # Create a fake NITF file with STDIDC TRE (probably not conformant, but enough to test GDAL code)
    STDIDC = '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf58.ntf', 1, 1, options=['TRE=STDIDC=' + STDIDC])
    ds = None
    ds = gdal.Open('tmp/nitf58.ntf')
    md = ds.GetMetadata()
    ds = None
    assert md is not None and 'NITF_STDIDC_ACQUISITION_DATE' in md
###############################################################################
# Test reading IMRFCA and IMASDA
def test_nitf_read_IMRFCA_IMASDA():
    """RPC metadata must be exposed only when both IMRFCA and IMASDA TREs are
    present and correctly sized; in every other case the RPC domain must be
    empty."""
    # Create a fake NITF file with fake IMRFCA and IMASDA TRE
    IMRFCA = '0' * 1760
    IMASDA = '0' * 242
    tmpfile = '/vsimem/nitf_read_IMRFCA_IMASDA.ntf'
    # Both TREs present: the RPC domain must be populated.
    gdal.GetDriverByName('NITF').Create(tmpfile, 1, 1, options=['TRE=IMRFCA=' + IMRFCA, 'TRE=IMASDA=' + IMASDA])
    ds = gdal.Open(tmpfile)
    md = ds.GetMetadata('RPC')
    ds = None
    gdal.Unlink(tmpfile)
    assert not (md is None or md == {})
    # Only IMRFCA
    gdal.GetDriverByName('NITF').Create(tmpfile, 1, 1, options=['TRE=IMRFCA=' + IMRFCA])
    ds = gdal.Open(tmpfile)
    md = ds.GetMetadata('RPC')
    ds = None
    gdal.Unlink(tmpfile)
    assert md == {}
    # Only IMASDA
    gdal.GetDriverByName('NITF').Create(tmpfile, 1, 1, options=['TRE=IMASDA=' + IMASDA])
    ds = gdal.Open(tmpfile)
    md = ds.GetMetadata('RPC')
    ds = None
    gdal.Unlink(tmpfile)
    assert md == {}
    # Too short IMRFCA
    # (error handler silences the warning emitted for the truncated TRE)
    with gdaltest.error_handler():
        gdal.GetDriverByName('NITF').Create(tmpfile, 1, 1, options=['TRE=IMRFCA=' + IMRFCA[0:-1], 'TRE=IMASDA=' + IMASDA])
        ds = gdal.Open(tmpfile)
        md = ds.GetMetadata('RPC')
    ds = None
    gdal.Unlink(tmpfile)
    assert md == {}
    # Too short IMASDA
    with gdaltest.error_handler():
        gdal.GetDriverByName('NITF').Create(tmpfile, 1, 1, options=['TRE=IMRFCA=' + IMRFCA, 'TRE=IMASDA=' + IMASDA[0:-1]])
        ds = gdal.Open(tmpfile)
        md = ds.GetMetadata('RPC')
    ds = None
    gdal.Unlink(tmpfile)
    assert md == {}
###############################################################################
# Test georeferencing through .nfw and .hdr files
def test_nitf_59():
    """An adjacent .nfw/.hdr pair must georeference the NITF file."""
    for ext in ('nfw', 'hdr'):
        shutil.copyfile('data/nitf/nitf59.' + ext, 'tmp/nitf59.' + ext)
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf59.ntf', 1, 1, options=['ICORDS=N'])
    ds = None
    ds = gdal.Open('tmp/nitf59.ntf')
    wkt = ds.GetProjectionRef()
    gt = ds.GetGeoTransform()
    ds = None
    assert """PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0]""" in wkt, \
        'did not get expected SRS'
    assert gt == (149999.5, 1.0, 0.0, 4500000.5, 0.0, -1.0), \
        'did not get expected geotransform'
###############################################################################
# Test reading CADRG polar tile georeferencing (#2940)
def test_nitf_60():
    """A (truncated) CADRG polar tile must yield an Azimuthal Equidistant SRS
    centred on the pole and the expected geotransform (#2940)."""
    # Shut down errors because the file is truncated
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds = gdal.Open('data/nitf/testtest.on9')
    gdal.PopErrorHandler()
    wkt = ds.GetProjectionRef()
    gt = ds.GetGeoTransform()
    ds = None
    assert wkt == """PROJCS["unknown",GEOGCS["unknown",DATUM["unknown",SPHEROID["unknown",6378137,0]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]]],PROJECTION["Azimuthal_Equidistant"],PARAMETER["latitude_of_center",90],PARAMETER["longitude_of_center",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]""", \
        'did not get expected SRS'
    # Compare element-wise with a tolerance: the values are not exactly
    # representable.
    ref_gt = [1036422.8453166834, 149.94543479697344, 0.0, 345474.28177222813, 0.0, -149.94543479697404]
    for i in range(6):
        assert gt[i] == pytest.approx(ref_gt[i], abs=1e-6), 'did not get expected geotransform'
###############################################################################
# Test reading TRE from DE segment
def test_nitf_61():
    """Read TREs stored in a DE segment."""
    # Derived from http://www.gwg.nga.mil/ntb/baseline/software/testfile/rsm/SampleFiles/FrameSet1/NITF_Files/i_6130a.zip
    # but hand edited to have just 1x1 imagery
    de_ds = gdal.Open('data/nitf/i_6130a_truncated.ntf')
    tre_md = de_ds.GetMetadata('TRE')
    tre_xml = de_ds.GetMetadata('xml:TRE')[0]
    de_ds = None

    assert tre_md is not None
    for tre_name in ('RSMDCA', 'RSMECA', 'RSMPCA', 'RSMIDA'):
        assert tre_name in tre_md
    assert tre_xml.find('<tre name="RSMDCA"') != -1, 'did not get expected xml:TRE'
###############################################################################
# Test creating & reading image comments
def test_nitf_62():
    """Create and read back image comments (ICOM)."""
    # 80+1 characters
    comments = '012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678ZA'
    out_ds = gdal.GetDriverByName('NITF').Create('tmp/nitf62.ntf', 1, 1, options=['ICOM=' + comments])
    out_ds = None

    reopened = gdal.Open('tmp/nitf62.ntf')
    file_md = reopened.GetMetadata()
    reopened = None

    got_comments = file_md['NITF_IMAGE_COMMENTS']
    # Comments are stored in 80-character chunks, so two chunks are expected.
    if len(got_comments) != 160 or comments not in got_comments:
        print("'%s'" % got_comments)
        pytest.fail('did not get expected comments')
###############################################################################
# Test NITFReadImageLine() and NITFWriteImageLine() when nCols < nBlockWidth (#3551)
def test_nitf_63():
    """Test NITFReadImageLine()/NITFWriteImageLine() when nCols < nBlockWidth (#3551)."""
    ds = gdal.GetDriverByName('NITF').Create('tmp/nitf63.ntf', 50, 25, 3, gdal.GDT_Int16, options=['BLOCKXSIZE=256'])
    ds = None

    # Patch IMODE at hand. The file must be opened in *binary* mode: a
    # text-mode handle would apply newline translation on Windows and could
    # corrupt the file, and arbitrary seeks on text-mode files are
    # unreliable in Python 3. (os.SEEK_SET has been guaranteed since
    # Python 2.5, so no compatibility shim is needed.)
    with open('tmp/nitf63.ntf', 'r+b') as f:
        f.seek(820, os.SEEK_SET)
        f.write(b'P')

    ds = gdal.Open('tmp/nitf63.ntf', gdal.GA_Update)
    md = ds.GetMetadata()
    assert md['NITF_IMODE'] == 'P', 'wrong IMODE'
    # Fill each band with a distinct value so the checksums differ per band.
    ds.GetRasterBand(1).Fill(0)
    ds.GetRasterBand(2).Fill(127)
    ds.GetRasterBand(3).Fill(255)
    ds = None

    ds = gdal.Open('tmp/nitf63.ntf')
    cs1 = ds.GetRasterBand(1).Checksum()
    cs2 = ds.GetRasterBand(2).Checksum()
    cs3 = ds.GetRasterBand(3).Checksum()
    ds = None

    assert cs1 == 0 and cs2 == 14186 and cs3 == 15301, \
        ('did not get expected checksums : (%d, %d, %d) instead of (0, 14186, 15301)' % (cs1, cs2, cs3))
###############################################################################
# Test SDE_TRE creation option
def test_nitf_64():
    """Test the SDE_TRE creation option and compare geolocation precision."""
    src_ds = gdal.GetDriverByName('GTiff').Create('/vsimem/nitf_64.tif', 256, 256, 1)
    src_ds.SetGeoTransform([2.123456789, 0.123456789, 0, 49.123456789, 0, -0.123456789])
    sr = osr.SpatialReference()
    sr.SetWellKnownGeogCS('WGS84')
    src_ds.SetProjection(sr.ExportToWkt())

    # (creation options, expected geotransform, failure message).
    # With ICORDS=D and ICORDS=G, the topleft location is only precise to the
    # 3rd decimal; SDE_TRE=YES keeps precision up to the 9th decimal.
    cases = [
        (['ICORDS=D'],
         (2.123270588235294, 0.12345882352941177, 0.0, 49.123729411764707, 0.0, -0.12345882352941176),
         'did not get expected GT in ICORDS=D mode'),
        (['ICORDS=G'],
         (2.1235495642701521, 0.12345642701525053, 0.0, 49.123394880174288, 0.0, -0.12345642701525052),
         'did not get expected GT in ICORDS=G mode'),
        (['SDE_TRE=YES'],
         (2.123456789, 0.1234567901234568, 0.0, 49.123456789000002, 0.0, -0.12345679012345678),
         'did not get expected GT in SDE_TRE mode'),
    ]
    for creation_options, expected_gt, message in cases:
        out_ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_64.ntf', src_ds, options=creation_options)
        out_ds = None
        out_ds = gdal.Open('/vsimem/nitf_64.ntf')
        got_gt = out_ds.GetGeoTransform()
        for expected, got in zip(expected_gt, got_gt):
            assert expected == pytest.approx(got, abs=1e-10), message
        out_ds = None

    src_ds = None
    gdal.Unlink('/vsimem/nitf_64.tif')
    gdal.Unlink('/vsimem/nitf_64.ntf')
###############################################################################
# Test creating an image with block_width = image_width > 8192 (#3922)
def test_nitf_65():
    """Create an image with block_width = image_width > 8192 (#3922)."""
    out_ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_65.ntf', 10000, 100, options=['BLOCKXSIZE=10000'])
    out_ds = None

    reopened = gdal.Open('/vsimem/nitf_65.ntf')
    block_xsize = reopened.GetRasterBand(1).GetBlockSize()[0]
    # Just check that the band can be read without error.
    reopened.GetRasterBand(1).Checksum()
    reopened = None

    gdal.Unlink('/vsimem/nitf_65.ntf')
    assert block_xsize == 10000
###############################################################################
# Test creating an image with block_height = image_height > 8192 (#3922)
def test_nitf_66():
    """Create an image with block_height = image_height > 8192 (#3922)."""
    out_ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_66.ntf', 100, 10000, options=['BLOCKYSIZE=10000', 'BLOCKXSIZE=50'])
    out_ds = None

    reopened = gdal.Open('/vsimem/nitf_66.ntf')
    block_ysize = reopened.GetRasterBand(1).GetBlockSize()[1]
    # Just check that the band can be read without error.
    reopened.GetRasterBand(1).Checksum()
    reopened = None

    gdal.Unlink('/vsimem/nitf_66.ntf')
    assert block_ysize == 10000
###############################################################################
# Test that we don't use scanline access in illegal cases (#3926)
def test_nitf_67():
    """Check that scanline access is not used in illegal cases (#3926)."""
    src_ds = gdal.Open('data/byte.tif')
    # BLOCKYSIZE=1 with BLOCKXSIZE smaller than the image width is not a
    # legal scanline layout; the driver is expected to warn but still write.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    out_ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_67.ntf', src_ds, options=['BLOCKYSIZE=1', 'BLOCKXSIZE=10'], strict=0)
    gdal.PopErrorHandler()
    out_ds = None
    src_ds = None

    reopened = gdal.Open('/vsimem/nitf_67.ntf')
    checksum = reopened.GetRasterBand(1).Checksum()
    reopened = None

    for filename in ('/vsimem/nitf_67.ntf', '/vsimem/nitf_67.ntf.aux.xml'):
        gdal.Unlink(filename)
    assert checksum == 4672
###############################################################################
# Test reading NITF_METADATA domain
def test_nitf_68():
    """Read the NITF_METADATA metadata domain."""
    first = gdal.Open('data/nitf/rgb.ntf')
    assert len(first.GetMetadata('NITF_METADATA')) == 2
    first = None

    # A freshly opened dataset must also expose individual items of the domain.
    second = gdal.Open('data/nitf/rgb.ntf')
    assert second.GetMetadataItem('NITFFileHeader', 'NITF_METADATA')
    second = None
###############################################################################
# Test SetGCPs() support
def test_nitf_69():
    """Test SetGCPs() support: write GCPs via CreateCopy() and Create()+SetGCPs(),
    then read them back and check pixel/line/X/Y values."""
    vrt_txt = """<VRTDataset rasterXSize="20" rasterYSize="20">
<GCPList Projection='GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'>
<GCP Id="" Pixel="0.5" Line="0.5" X="2" Y="49"/>
<GCP Id="" Pixel="0.5" Line="19.5" X="2" Y="48"/>
<GCP Id="" Pixel="19.5" Line="0.5" X="3" Y="49.5"/>
<GCP Id="" Pixel="19.5" Line="19.5" X="3" Y="48"/>
</GCPList>
<VRTRasterBand dataType="Byte" band="1">
<SimpleSource>
<SourceFilename relativeToVRT="1">data/byte.tif</SourceFilename>
<SourceProperties RasterXSize="20" RasterYSize="20" DataType="Byte" BlockXSize="20" BlockYSize="20" />
<SourceBand>1</SourceBand>
</SimpleSource>
</VRTRasterBand>
</VRTDataset>"""
    # Test CreateCopy()
    vrt_ds = gdal.Open(vrt_txt)
    ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_69_src.ntf', vrt_ds)
    ds = None
    vrt_ds = None
    # Just in case
    gdal.Unlink('/vsimem/nitf_69_src.ntf.aux.xml')
    # Test Create() and SetGCPs()
    src_ds = gdal.Open('/vsimem/nitf_69_src.ntf')
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_69_dest.ntf', 20, 20, 1, options=['ICORDS=G'])
    ds.SetGCPs(src_ds.GetGCPs(), src_ds.GetGCPProjection())
    ds.SetGCPs(src_ds.GetGCPs(), src_ds.GetGCPProjection())  # To check we can call it several times without error
    ds = None
    src_ds = None
    # Now open again
    ds = gdal.Open('/vsimem/nitf_69_dest.ntf')
    got_gcps = ds.GetGCPs()
    ds = None
    gdal.Unlink('/vsimem/nitf_69_src.ntf')
    gdal.Unlink('/vsimem/nitf_69_dest.ntf')
    # Check. NOTE(review): the GCPs are read back in corner order
    # (UL, UR, LR, LL), which differs from the order they were written in.
    # Upper-left
    assert (got_gcps[0].GCPPixel == pytest.approx(0.5, abs=1e-5) and got_gcps[0].GCPLine == pytest.approx(0.5, abs=1e-5) and
            got_gcps[0].GCPX == pytest.approx(2, abs=1e-5) and got_gcps[0].GCPY == pytest.approx(49, abs=1e-5)), \
        'wrong gcp'
    # Upper-right
    assert (got_gcps[1].GCPPixel == pytest.approx(19.5, abs=1e-5) and got_gcps[1].GCPLine == pytest.approx(0.5, abs=1e-5) and
            got_gcps[1].GCPX == pytest.approx(3, abs=1e-5) and got_gcps[1].GCPY == pytest.approx(49.5, abs=1e-5)), \
        'wrong gcp'
    # Lower-right
    assert (got_gcps[2].GCPPixel == pytest.approx(19.5, abs=1e-5) and got_gcps[2].GCPLine == pytest.approx(19.5, abs=1e-5) and
            got_gcps[2].GCPX == pytest.approx(3, abs=1e-5) and got_gcps[2].GCPY == pytest.approx(48, abs=1e-5)), \
        'wrong gcp'
    # Lower-left
    assert (got_gcps[3].GCPPixel == pytest.approx(0.5, abs=1e-5) and got_gcps[3].GCPLine == pytest.approx(19.5, abs=1e-5) and
            got_gcps[3].GCPX == pytest.approx(2, abs=1e-5) and got_gcps[3].GCPY == pytest.approx(48, abs=1e-5)), \
        'wrong gcp'
###############################################################################
# Create and read a JPEG encoded NITF file with NITF dimensions != JPEG dimensions
def test_nitf_70():
    """Create and read a JPEG encoded NITF file with NITF dimensions != JPEG dimensions."""
    src_ds = gdal.Open('data/rgbsmall.tif')
    nitf_ds = gdal.GetDriverByName('NITF').CreateCopy('tmp/nitf_70.ntf', src_ds,
                                                      options=['IC=C3', 'BLOCKXSIZE=64', 'BLOCKYSIZE=64'])
    nitf_ds = None

    # Build a GTiff with the same JPEG settings to serve as a reference.
    tif_ds = gdal.GetDriverByName('GTiff').CreateCopy('tmp/nitf_70.tif', src_ds,
                                                      options=['COMPRESS=JPEG', 'PHOTOMETRIC=YCBCR', 'TILED=YES', 'BLOCKXSIZE=64', 'BLOCKYSIZE=64'])
    tif_ds = None
    src_ds = None

    nitf_ds = gdal.Open('tmp/nitf_70.ntf')
    cs = nitf_ds.GetRasterBand(1).Checksum()
    nitf_ds = None

    tif_ds = gdal.Open('tmp/nitf_70.tif')
    cs_ref = tif_ds.GetRasterBand(1).Checksum()
    tif_ds = None

    gdal.GetDriverByName('NITF').Delete('tmp/nitf_70.ntf')
    gdal.GetDriverByName('GTiff').Delete('tmp/nitf_70.tif')
    assert cs == cs_ref
###############################################################################
# Test reading ENGRDA TRE (#6285)
def test_nitf_71():
    """Test reading the ENGRDA TRE (#6285): write a raw ENGRDA payload with two
    records and check its decoded xml:TRE representation."""
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_71.ntf', 1, 1, options=['TRE=ENGRDA=0123456789012345678900210012345678901230123X01200000002XY01X01230123X01200000001X'])
    ds = None
    ds = gdal.Open('/vsimem/nitf_71.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_71.ntf')
    # Expected field-by-field decoding of the raw TRE payload above.
    expected_data = """<tres>
<tre name="ENGRDA" location="image">
<field name="RESRC" value="01234567890123456789" />
<field name="RECNT" value="002" />
<repeated name="RECORDS" number="2">
<group index="0">
<field name="ENGLN" value="10" />
<field name="ENGLBL" value="0123456789" />
<field name="ENGMTXC" value="0123" />
<field name="ENGMTXR" value="0123" />
<field name="ENGTYP" value="X" />
<field name="ENGDTS" value="0" />
<field name="ENGDTU" value="12" />
<field name="ENGDATC" value="00000002" />
<field name="ENGDATA" value="XY" />
</group>
<group index="1">
<field name="ENGLN" value="01" />
<field name="ENGLBL" value="X" />
<field name="ENGMTXC" value="0123" />
<field name="ENGMTXR" value="0123" />
<field name="ENGTYP" value="X" />
<field name="ENGDTS" value="0" />
<field name="ENGDTU" value="12" />
<field name="ENGDATC" value="00000001" />
<field name="ENGDATA" value="X" />
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test writing and reading RPC00B
def compare_rpc(src_md, md):
    """Check that every RPC key of src_md is present in md with the expected value.

    ERR_BIAS and ERR_RAND are skipped because they are optional in RPC00B.
    COEFF entries are compared as lists of floats; scalar entries as floats.
    Calls pytest.fail() on a value mismatch.
    """
    skipped_keys = ('ERR_BIAS', 'ERR_RAND')
    for key, src_value in src_md.items():
        if key in skipped_keys:
            continue
        assert key in md, ('fail: %s missing' % key)
        got_value = md[key]
        if 'COEFF' in key:
            # Coefficient entries are space-separated lists of floats.
            expected = [float(v) for v in src_value.strip().split(' ')]
            found = [float(v) for v in got_value.strip().split(' ')]
            if expected != found:
                print(md)
                pytest.fail('fail: %s value is not the one expected' % key)
        elif float(src_value) != float(got_value):
            print(md)
            pytest.fail('fail: %s value is not the one expected' % key)
def test_nitf_72():
    """Test writing and reading the RPC00B TRE: max precision round-trip,
    optional ERR_BIAS/ERR_RAND, direct TRE copy, RPC00B=NO, field padding,
    loss-of-precision warnings (PAM fallback), RPCTXT sidecar, and
    out-of-range rejection."""
    src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    # Use full precision
    src_md_max_precision = {
        'ERR_BIAS': '1234.56',
        'ERR_RAND': '2345.67',
        'LINE_OFF': '345678',
        'SAMP_OFF': '45678',
        'LAT_OFF': '-89.8765',
        'LONG_OFF': '-179.1234',
        'HEIGHT_OFF': '-9876',
        'LINE_SCALE': '987654',
        'SAMP_SCALE': '67890',
        'LAT_SCALE': '-12.3456',
        'LONG_SCALE': '-123.4567',
        'HEIGHT_SCALE': '-1234',
        'LINE_NUM_COEFF': '0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
        'LINE_DEN_COEFF': '1 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
        'SAMP_NUM_COEFF': '2 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
        'SAMP_DEN_COEFF': '3 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
    }
    src_md = src_md_max_precision
    src_ds.SetMetadata(src_md, 'RPC')
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
    assert gdal.GetLastErrorMsg() == '', 'fail: did not expect warning'
    # All values fit exactly in the TRE, so no PAM sidecar should be written.
    if gdal.VSIStatL('/vsimem/nitf_72.ntf.aux.xml') is not None:
        f = gdal.VSIFOpenL('/vsimem/nitf_72.ntf.aux.xml', 'rb')
        data = gdal.VSIFReadL(1, 10000, f)
        gdal.VSIFCloseL(f)
        print(str(data))
        pytest.fail('fail: PAM file not expected')
    ds = gdal.Open('/vsimem/nitf_72.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72.ntf')
    compare_rpc(src_md, md)
    expected_RPC00B_max_precision = '11234.562345.6734567845678-89.8765-179.1234-987698765467890-12.3456-123.4567-1234+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+1.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+2.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+3.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9'
    assert RPC00B == expected_RPC00B_max_precision, 'fail: did not get expected RPC00B'
    # Test without ERR_BIAS and ERR_RAND
    src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    src_md = copy.copy(src_md_max_precision)
    del src_md['ERR_BIAS']
    del src_md['ERR_RAND']
    src_ds.SetMetadata(src_md, 'RPC')
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
    assert gdal.GetLastErrorMsg() == '', 'fail: did not expect warning'
    # Missing ERR_BIAS/ERR_RAND default to 0000.00 — still no PAM expected.
    if gdal.VSIStatL('/vsimem/nitf_72.ntf.aux.xml') is not None:
        f = gdal.VSIFOpenL('/vsimem/nitf_72.ntf.aux.xml', 'rb')
        data = gdal.VSIFReadL(1, 10000, f)
        gdal.VSIFCloseL(f)
        print(str(data))
        pytest.fail('fail: PAM file not expected')
    ds = gdal.Open('/vsimem/nitf_72.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    ds = None
    expected_RPC00B = '10000.000000.0034567845678-89.8765-179.1234-987698765467890-12.3456-123.4567-1234+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+1.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+2.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+3.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9'
    assert RPC00B == expected_RPC00B, 'fail: did not get expected RPC00B'
    # Test that direct RPC00B copy works
    src_nitf_ds = gdal.Open('/vsimem/nitf_72.ntf')
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72_copy.ntf', src_nitf_ds)
    src_nitf_ds = None
    ds = gdal.Open('/vsimem/nitf_72_copy.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    ds = None
    assert RPC00B == expected_RPC00B, 'fail: did not get expected RPC00B'
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72.ntf')
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72_copy.ntf')
    # Test that RPC00B = NO works: RPC metadata goes to PAM instead of a TRE.
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds, options=['RPC00B=NO'])
    assert gdal.VSIStatL('/vsimem/nitf_72.ntf.aux.xml') is not None, \
        'fail: PAM file was expected'
    ds = gdal.Open('/vsimem/nitf_72.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72.ntf')
    assert RPC00B is None, 'fail: did not expect RPC00B'
    src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    # Test padding: values shorter than their RPC00B field width must be
    # zero/space padded by the writer.
    src_md = {
        'ERR_BIAS': '123',
        'ERR_RAND': '234',
        'LINE_OFF': '3456',
        'SAMP_OFF': '4567',
        'LAT_OFF': '8',
        'LONG_OFF': '17',
        'HEIGHT_OFF': '987',
        'LINE_SCALE': '98765',
        'SAMP_SCALE': '6789',
        'LAT_SCALE': '12',
        'LONG_SCALE': '109',
        'HEIGHT_SCALE': '34',
        'LINE_NUM_COEFF': '0 9.87e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
        'LINE_DEN_COEFF': '1 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
        'SAMP_NUM_COEFF': '2 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
        'SAMP_DEN_COEFF': '3 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9',
    }
    src_ds.SetMetadata(src_md, 'RPC')
    gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
    assert gdal.GetLastErrorMsg() == '', 'fail: did not expect warning'
    if gdal.VSIStatL('/vsimem/nitf_72.ntf.aux.xml') is not None:
        f = gdal.VSIFOpenL('/vsimem/nitf_72.ntf.aux.xml', 'rb')
        data = gdal.VSIFReadL(1, 10000, f)
        gdal.VSIFCloseL(f)
        print(str(data))
        pytest.fail('fail: PAM file not expected')
    ds = gdal.Open('/vsimem/nitf_72.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72.ntf')
    compare_rpc(src_md, md)
    expected_RPC00B = '10123.000234.0000345604567+08.0000+017.0000+098709876506789+12.0000+109.0000+0034+0.000000E+0+9.870000E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+1.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+2.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+3.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9'
    assert RPC00B == expected_RPC00B, 'fail: did not get expected RPC00B'
    # Test loss of precision: adding one extra digit to any offset/scale
    # value should trigger a warning and fall back to PAM, while the TRE
    # keeps the truncated (max-precision) representation.
    for key in ('LINE_OFF', 'SAMP_OFF', 'LAT_OFF', 'LONG_OFF', 'HEIGHT_OFF', 'LINE_SCALE', 'SAMP_SCALE', 'LAT_SCALE', 'LONG_SCALE', 'HEIGHT_SCALE'):
        src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
        src_md = copy.copy(src_md_max_precision)
        if src_md[key].find('.') < 0:
            src_md[key] += '.1'
        else:
            src_md[key] += '1'
        src_ds.SetMetadata(src_md, 'RPC')
        with gdaltest.error_handler():
            ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
        assert ds is not None, 'fail: expected a dataset'
        ds = None
        assert gdal.GetLastErrorMsg() != '', 'fail: expected a warning'
        assert gdal.VSIStatL('/vsimem/nitf_72.ntf.aux.xml') is not None, \
            'fail: PAM file was expected'
        gdal.Unlink('/vsimem/nitf_72.ntf.aux.xml')
        ds = gdal.Open('/vsimem/nitf_72.ntf')
        md = ds.GetMetadata('RPC')
        RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
        ds = None
        gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72.ntf')
        assert RPC00B == expected_RPC00B_max_precision, \
            'fail: did not get expected RPC00B'
    # Test loss of precision on coefficient lines: 9.876543e-10 cannot be
    # represented in the TRE and is written as 0.
    src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    src_md = copy.copy(src_md_max_precision)
    src_md['LINE_NUM_COEFF'] = '0 9.876543e-10 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9'
    src_ds.SetMetadata(src_md, 'RPC')
    with gdaltest.error_handler():
        ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
    assert ds is not None, 'fail: expected a dataset'
    ds = None
    assert gdal.GetLastErrorMsg() != '', 'fail: expected a warning'
    assert gdal.VSIStatL('/vsimem/nitf_72.ntf.aux.xml') is not None, \
        'fail: PAM file was expected'
    gdal.Unlink('/vsimem/nitf_72.ntf.aux.xml')
    ds = gdal.Open('/vsimem/nitf_72.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_72.ntf')
    expected_RPC00B = '11234.562345.6734567845678-89.8765-179.1234-987698765467890-12.3456-123.4567-1234+0.000000E+0+0.000000E+0+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+1.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+2.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+3.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9+0.000000E+0+9.876543E+9+9.876543E-9-9.876543E+9-9.876543E-9'
    assert RPC00B == expected_RPC00B, 'fail: did not get expected RPC00B'
    # Test RPCTXT creation option: a _RPC.TXT sidecar holds full precision.
    with gdaltest.error_handler():
        gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds, options=['RPCTXT=YES'])
    assert gdal.VSIStatL('/vsimem/nitf_72_RPC.TXT') is not None, \
        'fail: rpc.txt file was expected'
    ds = gdal.Open('/vsimem/nitf_72.ntf')
    md = ds.GetMetadata('RPC')
    RPC00B = ds.GetMetadataItem('RPC00B', 'TRE')
    fl = ds.GetFileList()
    ds = None
    assert '/vsimem/nitf_72_RPC.TXT' in fl, \
        'fail: _RPC.TXT file not reported in file list'
    # Check that we get full precision from the _RPC.TXT file
    compare_rpc(src_md, md)
    assert RPC00B == expected_RPC00B, 'fail: did not get expected RPC00B'
    # Test out of range: prepending a digit makes the value exceed the field
    # width and creation must fail.
    for key in ('LINE_OFF', 'SAMP_OFF', 'LAT_OFF', 'LONG_OFF', 'HEIGHT_OFF', 'LINE_SCALE', 'SAMP_SCALE', 'LAT_SCALE', 'LONG_SCALE', 'HEIGHT_SCALE'):
        src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
        src_md = copy.copy(src_md_max_precision)
        if src_md[key].find('-') >= 0:
            src_md[key] = '-1' + src_md[key][1:]
        else:
            src_md[key] = '1' + src_md[key]
        src_ds.SetMetadata(src_md, 'RPC')
        with gdaltest.error_handler():
            ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
        assert ds is None, ('fail: expected failure for %s' % key)
    # Test out of range on coefficient lines
    src_ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    src_md = copy.copy(src_md_max_precision)
    src_md['LINE_NUM_COEFF'] = '0 9.876543e10 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9 0 9.876543e+9 9.876543e-9 -9.876543e+9 -9.876543e-9'
    src_ds.SetMetadata(src_md, 'RPC')
    with gdaltest.error_handler():
        ds = gdal.GetDriverByName('NITF').CreateCopy('/vsimem/nitf_72.ntf', src_ds)
    assert ds is None, 'fail: expected failure'
###############################################################################
# Test case for https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=1525
def test_nitf_73():
    """Test case for https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=1525

    Only checks that opening the fuzzed file does not crash; the errors the
    driver emits while parsing it are silenced.
    """
    with gdaltest.error_handler():
        gdal.Open('data/nitf/oss_fuzz_1525.ntf')
###############################################################################
# Test cases for CCLSTA
# - Simple case
def test_nitf_74():
    """Test CCINFA (simple case): write a raw file-level CCINFA TRE with one
    country code and check its decoded xml:TRE representation."""
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_74.ntf', 1, 1, options=['FILE_TRE=CCINFA=0012AS 17ge:GENC:3:3-5:AUS00000'])
    ds = None
    ds = gdal.Open('/vsimem/nitf_74.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_74.ntf')
    # Expected field-by-field decoding of the raw TRE payload above.
    expected_data = """<tres>
<tre name="CCINFA" location="file">
<field name="NUMCODE" value="001" />
<repeated name="CODES" number="1">
<group index="0">
<field name="CODE_LEN" value="2" />
<field name="CODE" value="AS" />
<field name="EQTYPE" value="" />
<field name="ESURN_LEN" value="17" />
<field name="ESURN" value="ge:GENC:3:3-5:AUS" />
<field name="DETAIL_LEN" value="00000" />
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
# - TABLE AG.2 case
def test_nitf_75():
listing_AG1 = """<?xml version="1.0" encoding="UTF-8"?>
<genc:GeopoliticalEntityEntry
xmlns:genc="http://api.nsgreg.nga.mil/schema/genc/3.0"
xmlns:genc-cmn="http://api.nsgreg.nga.mil/schema/genc/3.0/genc-cmn"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://api.nsgreg.nga.mil/schema/genc/3.0 http://api.nsgreg.nga.mil/schema/genc/3.0.0/genc.xsd">
<genc:encoding>
<genc-cmn:char3Code>MMR</genc-cmn:char3Code>
<genc-cmn:char3CodeURISet>
<genc-cmn:codespaceURL>http://api.nsgreg.nga.mil/geo-political/GENC/3/3-5</genc-cmn:codespaceURL>
<genc-cmn:codespaceURN>urn:us:gov:dod:nga:def:geo-political:GENC:3:3-5</genc-cmn:codespaceURN>
<genc-cmn:codespaceURNBased>geo-political:GENC:3:3-5</genc-cmn:codespaceURNBased>
<genc-cmn:codespaceURNBasedShort>ge:GENC:3:3-5</genc-cmn:codespaceURNBasedShort>
</genc-cmn:char3CodeURISet>
<genc-cmn:char2Code>MM</genc-cmn:char2Code>
<genc-cmn:char2CodeURISet>
<genc-cmn:codespaceURL>http://api.nsgreg.nga.mil/geo-political/GENC/2/3-5</genc-cmn:codespaceURL>
<genc-cmn:codespaceURN>urn:us:gov:dod:nga:def:geo-political:GENC:2:3-5</genc-cmn:codespaceURN>
<genc-cmn:codespaceURNBased>geo-political:GENC:2:3-5</genc-cmn:codespaceURNBased>
<genc-cmn:codespaceURNBasedShort>ge:GENC:2:3-5</genc-cmn:codespaceURNBasedShort>
</genc-cmn:char2CodeURISet>
<genc-cmn:numericCode>104</genc-cmn:numericCode>
<genc-cmn:numericCodeURISet>
<genc-cmn:codespaceURL>http://api.nsgreg.nga.mil/geo-political/GENC/n/3-5</genc-cmn:codespaceURL>
<genc-cmn:codespaceURN>urn:us:gov:dod:nga:def:geo-political:GENC:n:3-5</genc-cmn:codespaceURN>
<genc-cmn:codespaceURNBased>geo-political:GENC:n:3-5</genc-cmn:codespaceURNBased>
<genc-cmn:codespaceURNBasedShort>ge:GENC:n:3-5</genc-cmn:codespaceURNBasedShort>
</genc-cmn:numericCodeURISet>
</genc:encoding>
<genc:name><![CDATA[BURMA]]></genc:name>
<genc:shortName><![CDATA[Burma]]></genc:shortName>
<genc:fullName><![CDATA[Union of Burma]]></genc:fullName>
<genc:gencStatus>exception</genc:gencStatus>
<genc:entryDate>2016-09-30</genc:entryDate>
<genc:entryType>unchanged</genc:entryType>
<genc:usRecognition>independent</genc:usRecognition>
<genc:entryNotesOnNaming><![CDATA[
The GENC Standard specifies the name "BURMA" where instead ISO 3166-1 specifies "MYANMAR"; GENC specifies the short name "Burma" where instead ISO 3166-1 specifies "Myanmar"; and GENC specifies the full name "Union of Burma" where instead ISO 3166-1 specifies "the Republic of the Union of Myanmar". The GENC Standard specifies the local short name for 'my'/'mya' as "Myanma Naingngandaw" where instead ISO 3166-1 specifies "Myanma".
]]></genc:entryNotesOnNaming>
<genc:division codeSpace="as:GENC:6:3-5">MM-01</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-02</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-03</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-04</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-05</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-06</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-07</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-11</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-12</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-13</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-14</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-15</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-16</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-17</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-18</genc:division>
<genc:localShortName>
<genc:name><![CDATA[Myanma Naingngandaw]]></genc:name>
<genc:iso6393Char3Code>mya</genc:iso6393Char3Code>
</genc:localShortName>
</genc:GeopoliticalEntityEntry>"""
ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_75.ntf', 1, 1, options=['TRE=CCINFA=0062RQ 17ge:GENC:3:3-5:PRI000002RQ 20as:ISO2:6:II-3:US-PR000002BM 17ge:GENC:3:3-5:MMR04108 ' +
listing_AG1 + '3MMR 19ge:ISO1:3:VII-7:MMR00000' + '2S1 19ge:GENC:3:3-alt:SCT000002YYC16gg:1059:2:ed9:3E00000'])
ds = None
ds = gdal.Open('/vsimem/nitf_75.ntf')
data = ds.GetMetadata('xml:TRE')[0]
ds = None
gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_75.ntf')
expected_data = """<tres>
<tre name="CCINFA" location="image">
<field name="NUMCODE" value="006" />
<repeated name="CODES" number="6">
<group index="0">
<field name="CODE_LEN" value="2" />
<field name="CODE" value="RQ" />
<field name="EQTYPE" value="" />
<field name="ESURN_LEN" value="17" />
<field name="ESURN" value="ge:GENC:3:3-5:PRI" />
<field name="DETAIL_LEN" value="00000" />
</group>
<group index="1">
<field name="CODE_LEN" value="2" />
<field name="CODE" value="RQ" />
<field name="EQTYPE" value="" />
<field name="ESURN_LEN" value="20" />
<field name="ESURN" value="as:ISO2:6:II-3:US-PR" />
<field name="DETAIL_LEN" value="00000" />
</group>
<group index="2">
<field name="CODE_LEN" value="2" />
<field name="CODE" value="BM" />
<field name="EQTYPE" value="" />
<field name="ESURN_LEN" value="17" />
<field name="ESURN" value="ge:GENC:3:3-5:MMR" />
<field name="DETAIL_LEN" value="04108" />
<field name="DETAIL_CMPR" value="" />
<field name="DETAIL" value="<?xml version="1.0" encoding="UTF-8"?>
<genc:GeopoliticalEntityEntry
xmlns:genc="http://api.nsgreg.nga.mil/schema/genc/3.0"
xmlns:genc-cmn="http://api.nsgreg.nga.mil/schema/genc/3.0/genc-cmn"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://api.nsgreg.nga.mil/schema/genc/3.0 http://api.nsgreg.nga.mil/schema/genc/3.0.0/genc.xsd">
<genc:encoding>
<genc-cmn:char3Code>MMR</genc-cmn:char3Code>
<genc-cmn:char3CodeURISet>
<genc-cmn:codespaceURL>http://api.nsgreg.nga.mil/geo-political/GENC/3/3-5</genc-cmn:codespaceURL>
<genc-cmn:codespaceURN>urn:us:gov:dod:nga:def:geo-political:GENC:3:3-5</genc-cmn:codespaceURN>
<genc-cmn:codespaceURNBased>geo-political:GENC:3:3-5</genc-cmn:codespaceURNBased>
<genc-cmn:codespaceURNBasedShort>ge:GENC:3:3-5</genc-cmn:codespaceURNBasedShort>
</genc-cmn:char3CodeURISet>
<genc-cmn:char2Code>MM</genc-cmn:char2Code>
<genc-cmn:char2CodeURISet>
<genc-cmn:codespaceURL>http://api.nsgreg.nga.mil/geo-political/GENC/2/3-5</genc-cmn:codespaceURL>
<genc-cmn:codespaceURN>urn:us:gov:dod:nga:def:geo-political:GENC:2:3-5</genc-cmn:codespaceURN>
<genc-cmn:codespaceURNBased>geo-political:GENC:2:3-5</genc-cmn:codespaceURNBased>
<genc-cmn:codespaceURNBasedShort>ge:GENC:2:3-5</genc-cmn:codespaceURNBasedShort>
</genc-cmn:char2CodeURISet>
<genc-cmn:numericCode>104</genc-cmn:numericCode>
<genc-cmn:numericCodeURISet>
<genc-cmn:codespaceURL>http://api.nsgreg.nga.mil/geo-political/GENC/n/3-5</genc-cmn:codespaceURL>
<genc-cmn:codespaceURN>urn:us:gov:dod:nga:def:geo-political:GENC:n:3-5</genc-cmn:codespaceURN>
<genc-cmn:codespaceURNBased>geo-political:GENC:n:3-5</genc-cmn:codespaceURNBased>
<genc-cmn:codespaceURNBasedShort>ge:GENC:n:3-5</genc-cmn:codespaceURNBasedShort>
</genc-cmn:numericCodeURISet>
</genc:encoding>
<genc:name><![CDATA[BURMA]]></genc:name>
<genc:shortName><![CDATA[Burma]]></genc:shortName>
<genc:fullName><![CDATA[Union of Burma]]></genc:fullName>
<genc:gencStatus>exception</genc:gencStatus>
<genc:entryDate>2016-09-30</genc:entryDate>
<genc:entryType>unchanged</genc:entryType>
<genc:usRecognition>independent</genc:usRecognition>
<genc:entryNotesOnNaming><![CDATA[
The GENC Standard specifies the name "BURMA" where instead ISO 3166-1 specifies "MYANMAR"; GENC specifies the short name "Burma" where instead ISO 3166-1 specifies "Myanmar"; and GENC specifies the full name "Union of Burma" where instead ISO 3166-1 specifies "the Republic of the Union of Myanmar". The GENC Standard specifies the local short name for 'my'/'mya' as "Myanma Naingngandaw" where instead ISO 3166-1 specifies "Myanma".
]]></genc:entryNotesOnNaming>
<genc:division codeSpace="as:GENC:6:3-5">MM-01</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-02</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-03</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-04</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-05</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-06</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-07</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-11</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-12</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-13</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-14</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-15</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-16</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-17</genc:division>
<genc:division codeSpace="as:GENC:6:3-5">MM-18</genc:division>
<genc:localShortName>
<genc:name><![CDATA[Myanma Naingngandaw]]></genc:name>
<genc:iso6393Char3Code>mya</genc:iso6393Char3Code>
</genc:localShortName>
</genc:GeopoliticalEntityEntry>" />
</group>
<group index="3">
<field name="CODE_LEN" value="3" />
<field name="CODE" value="MMR" />
<field name="EQTYPE" value="" />
<field name="ESURN_LEN" value="19" />
<field name="ESURN" value="ge:ISO1:3:VII-7:MMR" />
<field name="DETAIL_LEN" value="00000" />
</group>
<group index="4">
<field name="CODE_LEN" value="2" />
<field name="CODE" value="S1" />
<field name="EQTYPE" value="" />
<field name="ESURN_LEN" value="19" />
<field name="ESURN" value="ge:GENC:3:3-alt:SCT" />
<field name="DETAIL_LEN" value="00000" />
</group>
<group index="5">
<field name="CODE_LEN" value="2" />
<field name="CODE" value="YY" />
<field name="EQTYPE" value="C" />
<field name="ESURN_LEN" value="16" />
<field name="ESURN" value="gg:1059:2:ed9:3E" />
<field name="DETAIL_LEN" value="00000" />
</group>
</repeated>
</tre>
</tres>
"""
assert data == expected_data
###############################################################################
# Test parsing MATESA TRE (STDI-0002 App AK)
def test_nitf_76():
    """Round-trip a MATESA file-level TRE through a NITF file and verify the
    xml:TRE rendering, including the nested GROUPS/MATES repeated fields."""
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_76.ntf', 1, 1, options=['FILE_TRE=MATESA=EO-1_HYPERION FTITLE 006307APR2005_Hyperion_331406N0442000E_SWIR172_001_L1R-01B-BIB-GLAS0005RADIOMTRC_CALIB 0001EO-1_HYPERION FILENAME 0020HypGain_revC.dat.svfPARENT 0001EO-1_HYPERION FILENAME 0032EO12005097_020D020C_r1_WPS_01.L0PRE_DARKCOLLECT 0001EO-1_HYPERION FILENAME 0032EO12005097_020A0209_r1_WPS_01.L0POST_DARKCOLLECT 0001EO-1_HYPERION FILENAME 0032EO12005097_020F020E_r1_WPS_01.L0PARENT 0003EO-1_HYPERION FILENAME 0026EO1H1680372005097110PZ.L1REO-1_HYPERION FILENAME 0026EO1H1680372005097110PZ.AUXEO-1_HYPERION FILENAME 0026EO1H1680372005097110PZ.MET'])
    ds = None  # close so the TRE is flushed to the in-memory file
    ds = gdal.Open('/vsimem/nitf_76.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_76.ntf')
    expected_data = """<tres>
<tre name="MATESA" location="file">
<field name="CUR_SOURCE" value="EO-1_HYPERION" />
<field name="CUR_MATE_TYPE" value="FTITLE" />
<field name="CUR_FILE_ID_LEN" value="0063" />
<field name="CUR_FILE_ID" value="07APR2005_Hyperion_331406N0442000E_SWIR172_001_L1R-01B-BIB-GLAS" />
<field name="NUM_GROUPS" value="0005" />
<repeated name="GROUPS" number="5">
<group index="0">
<field name="RELATIONSHIP" value="RADIOMTRC_CALIB" />
<field name="NUM_MATES" value="0001" />
<repeated name="MATES" number="1">
<group index="0">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0020" />
<field name="MATE_ID" value="HypGain_revC.dat.svf" />
</group>
</repeated>
</group>
<group index="1">
<field name="RELATIONSHIP" value="PARENT" />
<field name="NUM_MATES" value="0001" />
<repeated name="MATES" number="1">
<group index="0">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0032" />
<field name="MATE_ID" value="EO12005097_020D020C_r1_WPS_01.L0" />
</group>
</repeated>
</group>
<group index="2">
<field name="RELATIONSHIP" value="PRE_DARKCOLLECT" />
<field name="NUM_MATES" value="0001" />
<repeated name="MATES" number="1">
<group index="0">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0032" />
<field name="MATE_ID" value="EO12005097_020A0209_r1_WPS_01.L0" />
</group>
</repeated>
</group>
<group index="3">
<field name="RELATIONSHIP" value="POST_DARKCOLLECT" />
<field name="NUM_MATES" value="0001" />
<repeated name="MATES" number="1">
<group index="0">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0032" />
<field name="MATE_ID" value="EO12005097_020F020E_r1_WPS_01.L0" />
</group>
</repeated>
</group>
<group index="4">
<field name="RELATIONSHIP" value="PARENT" />
<field name="NUM_MATES" value="0003" />
<repeated name="MATES" number="3">
<group index="0">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0026" />
<field name="MATE_ID" value="EO1H1680372005097110PZ.L1R" />
</group>
<group index="1">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0026" />
<field name="MATE_ID" value="EO1H1680372005097110PZ.AUX" />
</group>
<group index="2">
<field name="SOURCE" value="EO-1_HYPERION" />
<field name="MATE_TYPE" value="FILENAME" />
<field name="MATE_ID_LEN" value="0026" />
<field name="MATE_ID" value="EO1H1680372005097110PZ.MET" />
</group>
</repeated>
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing GRDPSB TRE (STDI-0002-1-v5.0)
def test_nitf_77():
    """Verify that a GRDPSB TRE written at creation time is parsed back into
    the expected xml:TRE representation."""
    fname = '/vsimem/nitf_77.ntf'
    out_ds = gdal.GetDriverByName('NITF').Create(fname, 1, 1, options=['TRE=GRDPSB=01+000027.81PIX_LATLON0000000000010000000000010000000000000000000000'])
    out_ds = None  # flush and close the freshly created file
    in_ds = gdal.Open(fname)
    got = in_ds.GetMetadata('xml:TRE')[0]
    in_ds = None
    gdal.GetDriverByName('NITF').Delete(fname)
    assert got == """<tres>
<tre name="GRDPSB" location="image">
<field name="NUM_GRDS" value="01" />
<repeated name="GRDS" number="1">
<group index="0">
<field name="ZVL" value="+000027.81" />
<field name="BAD" value="PIX_LATLON" />
<field name="LOD" value="000000000001" />
<field name="LAD" value="000000000001" />
<field name="LSO" value="00000000000" />
<field name="PSO" value="00000000000" />
</group>
</repeated>
</tre>
</tres>
"""
###############################################################################
# Test parsing BANDSB TRE (STDI-0002 App X)
def test_nitf_78():
    """Round-trip a binary BANDSB TRE (built via hex encoding) and verify the
    parsed XML, including the EXISTENCE_MASK-conditional fields."""
    float_data = "40066666" # == struct.pack(">f", 2.1).hex()
    bit_mask = "89800000" # Set bits 31, 27, 24, 23
    tre_data = "TRE=HEX/BANDSB=" + hex_string("00001RADIANCE S") + float_data*2 + \
        hex_string("0030.00M0030.00M-------M-------M ") + \
        bit_mask + hex_string("DETECTOR ") + float_data + hex_string("U00.851920.01105")
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_78.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_78.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_78.ntf')
    expected_data = """<tres>
<tre name="BANDSB" location="image">
<field name="COUNT" value="00001" />
<field name="RADIOMETRIC_QUANTITY" value="RADIANCE" />
<field name="RADIOMETRIC_QUANTITY_UNIT" value="S" />
<field name="SCALE_FACTOR" value="2.100000" />
<field name="ADDITIVE_FACTOR" value="2.100000" />
<field name="ROW_GSD" value="0030.00" />
<field name="ROW_GSD_UNIT" value="M" />
<field name="COL_GSD" value="0030.00" />
<field name="COL_GSD_UNIT" value="M" />
<field name="SPT_RESP_ROW" value="-------" />
<field name="SPT_RESP_UNIT_ROW" value="M" />
<field name="SPT_RESP_COL" value="-------" />
<field name="SPT_RESP_UNIT_COL" value="M" />
<field name="DATA_FLD_1" value="" />
<field name="EXISTENCE_MASK" value="2306867200" />
<field name="RADIOMETRIC_ADJUSTMENT_SURFACE" value="DETECTOR" />
<field name="ATMOSPHERIC_ADJUSTMENT_ALTITUDE" value="2.100000" />
<field name="WAVE_LENGTH_UNIT" value="U" />
<repeated name="BANDS" number="1">
<group index="0">
<field name="BAD_BAND" value="0" />
<field name="CWAVE" value="0.85192" />
<field name="FWHM" value="0.01105" />
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing ACCHZB TRE (STDI-0002-1-v5.0 Appendix P)
def test_nitf_79():
    """Round-trip an ACCHZB TRE (horizontal accuracy) and verify the parsed
    XML with its nested point list."""
    tre_data = "TRE=ACCHZB=01M 00129M 00129004+044.4130499724+33.69234401034+044.4945572008" \
        "+33.67855217830+044.1731373448+32.79106350687+044.2538103407+32.77733592314"
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_79.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_79.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_79.ntf')
    expected_data = """<tres>
<tre name="ACCHZB" location="image">
<field name="NUM_ACHZ" value="01" />
<repeated number="1">
<group index="0">
<field name="UNIAAH" value="M" />
<field name="AAH" value="00129" />
<field name="UNIAPH" value="M" />
<field name="APH" value="00129" />
<field name="NUM_PTS" value="004" />
<repeated number="4">
<group index="0">
<field name="LON" value="+044.4130499724" />
<field name="LAT" value="+33.69234401034" />
</group>
<group index="1">
<field name="LON" value="+044.4945572008" />
<field name="LAT" value="+33.67855217830" />
</group>
<group index="2">
<field name="LON" value="+044.1731373448" />
<field name="LAT" value="+32.79106350687" />
</group>
<group index="3">
<field name="LON" value="+044.2538103407" />
<field name="LAT" value="+32.77733592314" />
</group>
</repeated>
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing ACCVTB TRE (STDI-0002-1-v5.0 Appendix P)
def test_nitf_80():
    """Round-trip an ACCVTB TRE (vertical accuracy) and verify the parsed
    XML with its nested point list."""
    tre_data = "TRE=ACCVTB=01M 00095M 00095004+044.4130499724+33.69234401034+044.4945572008" \
        "+33.67855217830+044.1731373448+32.79106350687+044.2538103407+32.77733592314"
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_80.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_80.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_80.ntf')
    expected_data = """<tres>
<tre name="ACCVTB" location="image">
<field name="NUM_ACVT" value="01" />
<repeated number="1">
<group index="0">
<field name="UNIAAV" value="M" />
<field name="AAV" value="00095" />
<field name="UNIAPV" value="M" />
<field name="APV" value="00095" />
<field name="NUM_PTS" value="004" />
<repeated number="4">
<group index="0">
<field name="LON" value="+044.4130499724" />
<field name="LAT" value="+33.69234401034" />
</group>
<group index="1">
<field name="LON" value="+044.4945572008" />
<field name="LAT" value="+33.67855217830" />
</group>
<group index="2">
<field name="LON" value="+044.1731373448" />
<field name="LAT" value="+32.79106350687" />
</group>
<group index="3">
<field name="LON" value="+044.2538103407" />
<field name="LAT" value="+32.77733592314" />
</group>
</repeated>
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing MSTGTA TRE (STDI-0002-1-v5.0 App E)
def test_nitf_81():
    """Verify that an MSTGTA (mission target) TRE round-trips into the
    expected xml:TRE representation."""
    fname = '/vsimem/nitf_81.ntf'
    out_ds = gdal.GetDriverByName('NITF').Create(fname, 1, 1, options=["TRE=MSTGTA=012340123456789AB0123456789ABCDE0120123456789AB0123456789AB000123401234560123450TGT_LOC= "])
    out_ds = None  # flush and close
    in_ds = gdal.Open(fname)
    got = in_ds.GetMetadata('xml:TRE')[0]
    in_ds = None
    gdal.GetDriverByName('NITF').Delete(fname)
    assert got == """<tres>
<tre name="MSTGTA" location="image">
<field name="TGT_NUM" value="01234" />
<field name="TGT_ID" value="0123456789AB" />
<field name="TGT_BE" value="0123456789ABCDE" />
<field name="TGT_PRI" value="012" />
<field name="TGT_REQ" value="0123456789AB" />
<field name="TGT_LTIOV" value="0123456789AB" />
<field name="TGT_TYPE" value="0" />
<field name="TGT_COLL" value="0" />
<field name="TGT_CAT" value="01234" />
<field name="TGT_UTC" value="0123456" />
<field name="TGT_ELEV" value="012345" />
<field name="TGT_ELEV_UNIT" value="0" />
<field name="TGT_LOC" value="TGT_LOC=" />
</tre>
</tres>
"""
###############################################################################
# Test parsing PIATGB TRE (STDI-0002-1-v5.0 App C)
def test_nitf_82():
    """Verify that a PIATGB (profile target) TRE round-trips into the
    expected xml:TRE representation."""
    fname = '/vsimem/nitf_82.ntf'
    payload = "TRE=PIATGB=0123456789ABCDE0123456789ABCDE01012340123456789ABCDE012" \
        "TGTNAME= 012+01.234567-012.345678"
    out_ds = gdal.GetDriverByName('NITF').Create(fname, 1, 1, options=[payload])
    out_ds = None  # flush and close
    in_ds = gdal.Open(fname)
    got = in_ds.GetMetadata('xml:TRE')[0]
    in_ds = None
    gdal.GetDriverByName('NITF').Delete(fname)
    assert got == """<tres>
<tre name="PIATGB" location="image">
<field name="TGTUTM" value="0123456789ABCDE" />
<field name="PIATGAID" value="0123456789ABCDE" />
<field name="PIACTRY" value="01" />
<field name="PIACAT" value="01234" />
<field name="TGTGEO" value="0123456789ABCDE" />
<field name="DATUM" value="012" />
<field name="TGTNAME" value="TGTNAME=" />
<field name="PERCOVER" value="012" />
<field name="TGTLAT" value="+01.234567" />
<field name="TGTLON" value="-012.345678" />
</tre>
</tres>
"""
###############################################################################
# Test parsing PIXQLA TRE (STDI-0002-1-v5.0 App AA)
def test_nitf_83():
    """Round-trip a PIXQLA (pixel quality) TRE and verify the parsed XML,
    including the AISDLVL and PQ_CONDITION repeated fields."""
    tre_data = "TRE=PIXQLA=00100200031Dead " \
        "Saturated Bad "
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_83.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_83.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_83.ntf')
    expected_data = """<tres>
<tre name="PIXQLA" location="image">
<field name="NUMAIS" value="001" />
<repeated number="1">
<group index="0">
<field name="AISDLVL" value="002" />
</group>
</repeated>
<field name="NPIXQUAL" value="0003" />
<field name="PQ_BIT_VALUE" value="1" />
<repeated number="3">
<group index="0">
<field name="PQ_CONDITION" value="Dead" />
</group>
<group index="1">
<field name="PQ_CONDITION" value="Saturated" />
</group>
<group index="2">
<field name="PQ_CONDITION" value="Bad" />
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing PIXMTA TRE (STDI-0002-1-v5.0 App AJ)
def test_nitf_84():
    """Round-trip a PIXMTA (pixel metric) TRE and verify the parsed XML,
    including the per-metric DESCRIPTION/UNIT/FITTYPE repeated group."""
    tre_data = "TRE=PIXMTA=0010020.00000000E+000.00000000E+001.00000000E+003.35200000E+03F00001P" \
        "BAND_WAVELENGTH micron D00000"
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_84.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_84.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_84.ntf')
    expected_data = """<tres>
<tre name="PIXMTA" location="image">
<field name="NUMAIS" value="001" />
<repeated number="1">
<group index="0">
<field name="AISDLVL" value="002" />
</group>
</repeated>
<field name="ORIGIN_X" value="0.00000000E+00" />
<field name="ORIGIN_Y" value="0.00000000E+00" />
<field name="SCALE_X" value="1.00000000E+00" />
<field name="SCALE_Y" value="3.35200000E+03" />
<field name="SAMPLE_MODE" value="F" />
<field name="NUMMETRICS" value="00001" />
<field name="PERBAND" value="P" />
<repeated number="1">
<group index="0">
<field name="DESCRIPTION" value="BAND_WAVELENGTH" />
<field name="UNIT" value="micron" />
<field name="FITTYPE" value="D" />
</group>
</repeated>
<field name="RESERVED_LEN" value="00000" />
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test creating a TRE with a hexadecimal string
def test_nitf_85():
    """A TRE supplied with the HEX/ prefix must be hex-decoded before being
    stored: 414243 decodes to "ABC"."""
    fname = '/vsimem/nitf_85.ntf'
    out_ds = gdal.GetDriverByName('NITF').Create(fname, 1, 1, options=["TRE=HEX/TSTTRE=414243"])
    out_ds = None  # flush and close
    in_ds = gdal.Open(fname)
    got = in_ds.GetMetadata('TRE')['TSTTRE']
    in_ds = None
    gdal.GetDriverByName('NITF').Delete(fname)
    assert got == "ABC"
###############################################################################
# Test parsing CSEXRB TRE (STDI-0002-1-v5.0 App AH)
def test_nitf_86():
    """Round-trip a binary CSEXRB TRE (built via hex encoding) and verify the
    parsed XML, including UUID lists, 64-bit DT_MULTIPLIER and DT values."""
    tre_data = "TRE=HEX/CSEXRB=" + hex_string("824ecf8e-1041-4cce-9edb-bc92d88624ca0047308e4b1-80e4-4777-b70f-f6e4a6881de9") + \
        hex_string("17261ee9-2175-4ff2-86ad-dddda1f8270ccf306a0b-c47c-44fa-af63-463549f6bf98fd99a346-770e-4048-94d8-5a8b2e832b32") + \
        hex_string("EO-1 HYPERNHYPERNF+03819809.03+03731961.77+03475785.73000000000120201012145900.000000000") + \
        "0100000000000000" + "05" + "0000000100000001" "FFFFFFFFFF" + \
        hex_string(" 1181.1 65535000335200256250.000") + \
        hex_string(" 0000132.812+54.861 9991000000")
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_86.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_86.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_86.ntf')
    expected_data = """<tres>
<tre name="CSEXRB" location="image">
<field name="IMAGE_UUID" value="824ecf8e-1041-4cce-9edb-bc92d88624ca" />
<field name="NUM_ASSOC_DES" value="004" />
<repeated number="4">
<group index="0">
<field name="ASSOC_DES_ID" value="7308e4b1-80e4-4777-b70f-f6e4a6881de9" />
</group>
<group index="1">
<field name="ASSOC_DES_ID" value="17261ee9-2175-4ff2-86ad-dddda1f8270c" />
</group>
<group index="2">
<field name="ASSOC_DES_ID" value="cf306a0b-c47c-44fa-af63-463549f6bf98" />
</group>
<group index="3">
<field name="ASSOC_DES_ID" value="fd99a346-770e-4048-94d8-5a8b2e832b32" />
</group>
</repeated>
<field name="PLATFORM_ID" value="EO-1" />
<field name="PAYLOAD_ID" value="HYPERN" />
<field name="SENSOR_ID" value="HYPERN" />
<field name="SENSOR_TYPE" value="F" />
<field name="GROUND_REF_POINT_X" value="+03819809.03" />
<field name="GROUND_REF_POINT_Y" value="+03731961.77" />
<field name="GROUND_REF_POINT_Z" value="+03475785.73" />
<field name="TIME_STAMP_LOC" value="0" />
<field name="REFERENCE_FRAME_NUM" value="000000001" />
<field name="BASE_TIMESTAMP" value="20201012145900.000000000" />
<field name="DT_MULTIPLIER" value="72057594037927936" />
<field name="DT_SIZE" value="5" />
<field name="NUMBER_FRAMES" value="1" />
<field name="NUMBER_DT" value="1" />
<repeated number="1">
<group index="0">
<field name="DT" value="1099511627775" />
</group>
</repeated>
<field name="MAX_GSD" value="" />
<field name="ALONG_SCAN_GSD" value="" />
<field name="CROSS_SCAN_GSD" value="" />
<field name="GEO_MEAN_GSD" value="1181.1" />
<field name="A_S_VERT_GSD" value="" />
<field name="C_S_VERT_GSD" value="" />
<field name="GEO_MEAN_VERT_GSD" value="" />
<field name="GSD_BETA_ANGLE" value="" />
<field name="DYNAMIC_RANGE" value="65535" />
<field name="NUM_LINES" value="0003352" />
<field name="NUM_SAMPLES" value="00256" />
<field name="ANGLE_TO_NORTH" value="250.000" />
<field name="OBLIQUITY_ANGLE" value="" />
<field name="AZ_OF_OBLIQUITY" value="" />
<field name="ATM_REFR_FLAG" value="0" />
<field name="VEL_ABER_FLAG" value="0" />
<field name="GRD_COVER" value="0" />
<field name="SNOW_DEPTH_CATEGORY" value="0" />
<field name="SUN_AZIMUTH" value="132.812" />
<field name="SUN_ELEVATION" value="+54.861" />
<field name="PREDICTED_NIIRS" value="" />
<field name="CIRCL_ERR" value="" />
<field name="LINEAR_ERR" value="" />
<field name="CLOUD_COVER" value="999" />
<field name="ROLLING_SHUTTER_FLAG" value="1" />
<field name="UE_TIME_FLAG" value="0" />
<field name="RESERVED_LEN" value="00000" />
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing ILLUMB TRE (STDI-0002-1-v5.0 App AL)
def test_nitf_87():
    """Round-trip a binary ILLUMB TRE (built via hex encoding) and verify the
    parsed XML, including the non-ASCII band unit and EXISTENCE_MASK fields."""
    mu = "B5" # \mu per ISO-8859-1
    bit_mask = "7A0000"
    tre_data = "TRE=HEX/ILLUMB=" + hex_string("0001") + \
        mu + hex_string("m 8.5192000000E-01") + \
        hex_string("2.5770800000E+00001NUM_BANDS=1 because ILLUMB has no band-dependent content ") + \
        hex_string("World Geodetic System 1984 ") + \
        hex_string("WGE World Geodetic System 1984 ") + \
        hex_string("WE Geodetic ") + \
        hex_string("GEOD") + \
        bit_mask + hex_string("00120050407072410+33.234974+044.333405+27.8100000E+0132.8+54.9167.5+52.5") + \
        hex_string("-163.4004099.2+84.0")
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_87.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_87.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_87.ntf')
    expected_data = """<tres>
<tre name="ILLUMB" location="image">
<field name="NUM_BANDS" value="0001" />
<field name="BAND_UNIT" value="µm" />
<repeated number="1">
<group index="0">
<field name="LBOUND" value="8.5192000000E-01" />
<field name="UBOUND" value="2.5770800000E+00" />
</group>
</repeated>
<field name="NUM_OTHERS" value="00" />
<field name="NUM_COMS" value="1" />
<repeated number="1">
<group index="0">
<field name="COMMENT" value="NUM_BANDS=1 because ILLUMB has no band-dependent content" />
</group>
</repeated>
<field name="GEO_DATUM" value="World Geodetic System 1984" />
<field name="GEO_DATUM_CODE" value="WGE" />
<field name="ELLIPSOID_NAME" value="World Geodetic System 1984" />
<field name="ELLIPSOID_CODE" value="WE" />
<field name="VERTICAL_DATUM_REF" value="Geodetic" />
<field name="VERTICAL_REF_CODE" value="GEOD" />
<field name="EXISTENCE_MASK" value="7995392" />
<field name="NUM_ILLUM_SETS" value="001" />
<repeated number="1">
<group index="0">
<field name="DATETIME" value="20050407072410" />
<field name="TARGET_LAT" value="+33.234974" />
<field name="TARGET_LON" value="+044.333405" />
<field name="TARGET_HGT" value="+27.8100000E+0" />
<field name="SUN_AZIMUTH" value="132.8" />
<field name="SUN_ELEV" value="+54.9" />
<field name="MOON_AZIMUTH" value="167.5" />
<field name="MOON_ELEV" value="+52.5" />
<field name="MOON_PHASE_ANGLE" value="-163.4" />
<field name="MOON_ILLUM_PERCENT" value="004" />
<field name="SENSOR_AZIMUTH" value="099.2" />
<field name="SENSOR_ELEV" value="+84.0" />
<repeated number="1">
<group index="0" />
</repeated>
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing CSWRPB TRE (STDI-0002-1-v5.0 App AH)
def test_nitf_88():
    """Round-trip a CSWRPB TRE and verify the parsed XML, including the
    doubly nested polynomial coefficient (A/B) repeated groups."""
    tre_data = "TRE=CSWRPB=1F199.9999999900000010000002000000300000040000005000000600000070000008" \
        "1111-9.99999999999999E-99+9.99999999999999E+9900000"
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_88.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_88.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_88.ntf')
    expected_data = """<tres>
<tre name="CSWRPB" location="image">
<field name="NUM_SETS_WARP_DATA" value="1" />
<field name="SENSOR_TYPE" value="F" />
<field name="WRP_INTERP" value="1" />
<repeated number="1">
<group index="0">
<field name="FL_WARP" value="99.99999999" />
<field name="OFFSET_LINE" value="0000001" />
<field name="OFFSET_SAMP" value="0000002" />
<field name="SCALE_LINE" value="0000003" />
<field name="SCALE_SAMP" value="0000004" />
<field name="OFFSET_LINE_UNWRP" value="0000005" />
<field name="OFFSET_SAMP_UNWRP" value="0000006" />
<field name="SCALE_LINE_UNWRP" value="0000007" />
<field name="SCALE_SAMP_UNWRP" value="0000008" />
<field name="LINE_POLY_ORDER_M1" value="1" />
<field name="LINE_POLY_ORDER_M2" value="1" />
<field name="SAMP_POLY_ORDER_N1" value="1" />
<field name="SAMP_POLY_ORDER_N2" value="1" />
<repeated number="1">
<group index="0">
<repeated number="1">
<group index="0">
<field name="A" value="-9.99999999999999E-99" />
</group>
</repeated>
</group>
</repeated>
<repeated number="1">
<group index="0">
<repeated number="1">
<group index="0">
<field name="B" value="+9.99999999999999E+99" />
</group>
</repeated>
</group>
</repeated>
</group>
</repeated>
<field name="RESERVED_LEN" value="00000" />
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing CSRLSB TRE (STDI-0002-1-v5.0 App AH)
def test_nitf_89():
    """Verify that a CSRLSB TRE with a single row/column block round-trips
    into the expected xml:TRE representation."""
    fname = '/vsimem/nitf_89.ntf'
    out_ds = gdal.GetDriverByName('NITF').Create(fname, 1, 1, options=["TRE=CSRLSB=0101+11111111.11-22222222.22+33333333.33-44444444.44"])
    out_ds = None  # flush and close
    in_ds = gdal.Open(fname)
    got = in_ds.GetMetadata('xml:TRE')[0]
    in_ds = None
    gdal.GetDriverByName('NITF').Delete(fname)
    assert got == """<tres>
<tre name="CSRLSB" location="image">
<field name="N_RS_ROW_BLOCKS" value="01" />
<field name="M_RS_COLUMN_BLOCKS" value="01" />
<repeated number="1">
<group index="0">
<repeated number="1">
<group index="0">
<field name="RS_DT_1" value="+11111111.11" />
<field name="RS_DT_2" value="-22222222.22" />
<field name="RS_DT_3" value="+33333333.33" />
<field name="RS_DT_4" value="-44444444.44" />
</group>
</repeated>
</group>
</repeated>
</tre>
</tres>
"""
###############################################################################
# Test parsing SECURA TRE (STDI-0002-1-v5.0 App AI)
def test_nitf_90():
    """Round-trip a SECURA file-level TRE carrying an embedded ARH.XML
    security payload and verify the parsed XML."""
    tre_data = "FILE_TRE=SECURA=20201020142500NITF02.10" + " "*207 + "ARH.XML 00068" + \
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?> <arh:Security></arh:Security>"
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_90.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_90.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_90.ntf')
    expected_data = """<tres>
<tre name="SECURA" location="file">
<field name="FDATTIM" value="20201020142500" />
<field name="NITFVER" value="NITF02.10" />
<field name="NFSECFLDS" value="" />
<field name="SECSTD" value="ARH.XML" />
<field name="SECCOMP" value="" />
<field name="SECLEN" value="00068" />
<field name="SECURITY" value="<?xml version="1.0" encoding="UTF-8"?> <arh:Security></arh:Security>" />
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing SNSPSB TRE (STDI-0002-1-v5.0 App P)
def test_nitf_91():
    """Round-trip an SNSPSB (sensor parameters) TRE and verify the parsed
    XML, including band and auxiliary-parameter repeated groups."""
    tre_data = "TRE=SNSPSB=010001111112222233333M 000001000001000001000001GSL " + \
        "PLTFM INS MOD PRL SID ACT DEG0000001 +111111.11-222222.22" + \
        " meters 000000000000000000000011111111111111111111112222222222222222222222001" + \
        "API Imeters 0123456789"
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_91.ntf', 1, 1, options=[tre_data])
    ds = None  # close so the TRE is flushed
    ds = gdal.Open('/vsimem/nitf_91.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None
    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_91.ntf')
    expected_data = """<tres>
<tre name="SNSPSB" location="image">
<field name="NUM_SNS" value="01" />
<repeated number="1">
<group index="0">
<field name="NUM_BP" value="00" />
<field name="NUM_BND" value="01" />
<repeated number="1">
<group index="0">
<field name="BID" value="11111" />
<field name="WS1" value="22222" />
<field name="WS2" value="33333" />
</group>
</repeated>
<field name="UNIRES" value="M" />
<field name="REX" value="000001" />
<field name="REY" value="000001" />
<field name="GSX" value="000001" />
<field name="GSY" value="000001" />
<field name="GSL" value="GSL" />
<field name="PLTFM" value="PLTFM" />
<field name="INS" value="INS" />
<field name="MOD" value="MOD" />
<field name="PRL" value="PRL" />
<field name="SID" value="SID" />
<field name="ACT" value="ACT" />
<field name="UNINOA" value="DEG" />
<field name="NOA" value="0000001" />
<field name="UNIANG" value="" />
<field name="UNIALT" value="" />
<field name="LONSCC" value="+111111.11" />
<field name="LATSCC" value="-222222.22" />
<field name="UNISAE" value="" />
<field name="UNIRPY" value="" />
<field name="UNIPXT" value="" />
<field name="UNISPE" value="meters" />
<field name="ROS" value="0000000000000000000000" />
<field name="PIS" value="1111111111111111111111" />
<field name="YAS" value="2222222222222222222222" />
<field name="NUM_AUX" value="001" />
<repeated number="1">
<group index="0">
<field name="API" value="API" />
<field name="APF" value="I" />
<field name="UNIAPX" value="meters" />
<field name="APN" value="0123456789" />
</group>
</repeated>
</group>
</repeated>
</tre>
</tres>
"""
    assert data == expected_data
###############################################################################
# Test parsing RSMAPB TRE (STDI-0002-1-v5.0 App U)
def test_nitf_RSMAPB():
    """Verify parsing of the RSMAPB TRE (RSM Adjustable Parameters,
    STDI-0002-1-v5.0 App U) into GDAL's xml:TRE metadata representation."""
    # Hand-built RSMAPB payload: one adjustable parameter (NPAR=01),
    # image-space adjustable type (I), ground coordinate domain (G), with all
    # floating-point fields set to the +9.99...E+99 fill pattern.
    tre_data = "TRE=RSMAPB=iid " + \
        "edition tid 01IG+9.99999999999999E+99" + \
        "+9.99999999999999E+99+9.99999999999999E+99+9.99999999999999E+99+9.99999999999999E+99+9.99999999999999E+99" + \
        "Y01011230001+9.99999999999999E+99+9.99999999999999E+99"

    # Round-trip the TRE through an in-memory NITF file.
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_RSMAPB.ntf', 1, 1, options=[tre_data])
    ds = None

    ds = gdal.Open('/vsimem/nitf_RSMAPB.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None

    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_RSMAPB.ntf')

    # Exact serialized form expected from the xml:TRE metadata domain.
    expected_data = """<tres>
  <tre name="RSMAPB" location="image">
    <field name="IID" value="iid" />
    <field name="EDITION" value="edition" />
    <field name="TID" value="tid" />
    <field name="NPAR" value="01" />
    <field name="APTYP" value="I" />
    <field name="LOCTYP" value="G" />
    <field name="NSFX" value="+9.99999999999999E+99" />
    <field name="NSFY" value="+9.99999999999999E+99" />
    <field name="NSFZ" value="+9.99999999999999E+99" />
    <field name="NOFFX" value="+9.99999999999999E+99" />
    <field name="NOFFY" value="+9.99999999999999E+99" />
    <field name="NOFFZ" value="+9.99999999999999E+99" />
    <field name="APBASE" value="Y" />
    <field name="NISAP" value="01" />
    <field name="NISAPR" value="01" />
    <repeated number="1">
      <group index="0">
        <field name="XPWRR" value="1" />
        <field name="YPWRR" value="2" />
        <field name="ZPWRR" value="3" />
      </group>
    </repeated>
    <field name="NISAPC" value="00" />
    <field name="NBASIS" value="01" />
    <repeated number="1">
      <group index="0">
        <repeated number="1">
          <group index="0">
            <field name="AEL" value="+9.99999999999999E+99" />
          </group>
        </repeated>
      </group>
    </repeated>
    <repeated number="1">
      <group index="0">
        <field name="PARVAL" value="+9.99999999999999E+99" />
      </group>
    </repeated>
  </tre>
</tres>
"""

    assert data == expected_data
###############################################################################
# Test parsing RSMDCB TRE (STDI-0002-1-v5.0 App U)
def test_nitf_RSMDCB():
    """Verify parsing of the RSMDCB TRE (RSM Direct Covariance,
    STDI-0002-1-v5.0 App U) into GDAL's xml:TRE metadata representation."""
    # Hand-built RSMDCB payload: one row of the covariance block (NROWCB=01),
    # one associated image (NIMGE=001), ground adjustable parameters (G).
    tre_data = "TRE=RSMDCB=iid " + \
        "edition tid 01001iidi" + " "*76 + \
        "01Y01GN" + "+9.99999999999999E+99"*6 + "N01ABCD+9.99999999999999E+99"

    # Round-trip the TRE through an in-memory NITF file.
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_RSMDCB.ntf', 1, 1, options=[tre_data])
    ds = None

    ds = gdal.Open('/vsimem/nitf_RSMDCB.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None

    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_RSMDCB.ntf')

    # Exact serialized form expected from the xml:TRE metadata domain.
    expected_data = """<tres>
  <tre name="RSMDCB" location="image">
    <field name="IID" value="iid" />
    <field name="EDITION" value="edition" />
    <field name="TID" value="tid" />
    <field name="NROWCB" value="01" />
    <field name="NIMGE" value="001" />
    <repeated number="1">
      <group index="0">
        <field name="IIDI" value="iidi" />
        <field name="NCOLCB" value="01" />
      </group>
    </repeated>
    <field name="INCAPD" value="Y" />
    <field name="NPAR" value="01" />
    <field name="APTYP" value="G" />
    <field name="LOCTYP" value="N" />
    <field name="NSFX" value="+9.99999999999999E+99" />
    <field name="NSFY" value="+9.99999999999999E+99" />
    <field name="NSFZ" value="+9.99999999999999E+99" />
    <field name="NOFFX" value="+9.99999999999999E+99" />
    <field name="NOFFY" value="+9.99999999999999E+99" />
    <field name="NOFFZ" value="+9.99999999999999E+99" />
    <field name="APBASE" value="N" />
    <field name="NGSAP" value="01" />
    <repeated number="1">
      <group index="0">
        <field name="GSAPID" value="ABCD" />
      </group>
    </repeated>
    <repeated number="1">
      <group index="0">
        <repeated number="1">
          <group index="0">
            <repeated number="1">
              <group index="0">
                <field name="CRSCOV" value="+9.99999999999999E+99" />
              </group>
            </repeated>
          </group>
        </repeated>
      </group>
    </repeated>
  </tre>
</tres>
"""

    assert data == expected_data
###############################################################################
# Test parsing RSMECB TRE (STDI-0002-1-v5.0 App U)
def test_nitf_RSMECB():
    """Verify parsing of the RSMECB TRE (RSM Indirect Error Covariance,
    STDI-0002-1-v5.0 App U) into GDAL's xml:TRE metadata representation."""
    # Hand-built RSMECB payload: includes both the indirect (INCLIC=Y) and
    # unmodeled (INCLUC=Y) covariance sections, one independent subgroup with
    # NUMOPG=02 (3 packed covariance terms) and 2 correlation segments.
    tre_data = "TRE=RSMECB=iid " + \
        "edition tid " + \
        "YY01012020110201GN" + "+9.99999999999999E+99"*6 + "N01ABCD02" + "+9.99999999999999E+99"*3 + \
        "1N2" + "+9.99999999999999E+99"*8 + "N2" + "+9.99999999999999E+99"*4 + "2" + "+9.99999999999999E+99"*4

    # Round-trip the TRE through an in-memory NITF file.
    ds = gdal.GetDriverByName('NITF').Create('/vsimem/nitf_RSMECB.ntf', 1, 1, options=[tre_data])
    ds = None

    ds = gdal.Open('/vsimem/nitf_RSMECB.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None

    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_RSMECB.ntf')

    # Exact serialized form expected from the xml:TRE metadata domain.
    expected_data = """<tres>
  <tre name="RSMECB" location="image">
    <field name="IID" value="iid" />
    <field name="EDITION" value="edition" />
    <field name="TID" value="tid" />
    <field name="INCLIC" value="Y" />
    <field name="INCLUC" value="Y" />
    <field name="NPARO" value="01" />
    <field name="IGN" value="01" />
    <field name="CVDATE" value="20201102" />
    <field name="NPAR" value="01" />
    <field name="APTYP" value="G" />
    <field name="LOCTYP" value="N" />
    <field name="NSFX" value="+9.99999999999999E+99" />
    <field name="NSFY" value="+9.99999999999999E+99" />
    <field name="NSFZ" value="+9.99999999999999E+99" />
    <field name="NOFFX" value="+9.99999999999999E+99" />
    <field name="NOFFY" value="+9.99999999999999E+99" />
    <field name="NOFFZ" value="+9.99999999999999E+99" />
    <field name="APBASE" value="N" />
    <field name="NGSAP" value="01" />
    <repeated number="1">
      <group index="0">
        <field name="GSAPID" value="ABCD" />
      </group>
    </repeated>
    <repeated number="1">
      <group index="0">
        <field name="NUMOPG" value="02" />
        <repeated number="3">
          <group index="0">
            <field name="ERRCVG" value="+9.99999999999999E+99" />
          </group>
          <group index="1">
            <field name="ERRCVG" value="+9.99999999999999E+99" />
          </group>
          <group index="2">
            <field name="ERRCVG" value="+9.99999999999999E+99" />
          </group>
        </repeated>
        <field name="TCDF" value="1" />
        <field name="ACSMC" value="N" />
        <field name="NCSEG" value="2" />
        <repeated number="2">
          <group index="0">
            <field name="CORSEG" value="+9.99999999999999E+99" />
            <field name="TAUSEG" value="+9.99999999999999E+99" />
          </group>
          <group index="1">
            <field name="CORSEG" value="+9.99999999999999E+99" />
            <field name="TAUSEG" value="+9.99999999999999E+99" />
          </group>
        </repeated>
      </group>
    </repeated>
    <repeated number="1">
      <group index="0">
        <repeated number="1">
          <group index="0">
            <field name="MAP" value="+9.99999999999999E+99" />
          </group>
        </repeated>
      </group>
    </repeated>
    <field name="URR" value="+9.99999999999999E+99" />
    <field name="URC" value="+9.99999999999999E+99" />
    <field name="UCC" value="+9.99999999999999E+99" />
    <field name="UACSMC" value="N" />
    <field name="UNCSR" value="2" />
    <repeated number="2">
      <group index="0">
        <field name="UCORSR" value="+9.99999999999999E+99" />
        <field name="UTAUSR" value="+9.99999999999999E+99" />
      </group>
      <group index="1">
        <field name="UCORSR" value="+9.99999999999999E+99" />
        <field name="UTAUSR" value="+9.99999999999999E+99" />
      </group>
    </repeated>
    <field name="UNCSC" value="2" />
    <repeated number="2">
      <group index="0">
        <field name="UCORSC" value="+9.99999999999999E+99" />
        <field name="UTAUSC" value="+9.99999999999999E+99" />
      </group>
      <group index="1">
        <field name="UCORSC" value="+9.99999999999999E+99" />
        <field name="UTAUSC" value="+9.99999999999999E+99" />
      </group>
    </repeated>
  </tre>
</tres>
"""

    assert data == expected_data
###############################################################################
# Test creation and reading of Data Extension Segments (DES)
def test_nitf_des():
    """Test creation and reading of Data Extension Segments (DES):
    write two identical DES, reopen, and check the xml:DES metadata."""
    # DES subheader: DESVER=02, DECLAS=U, 166 blank security chars, then a
    # 4-byte user subheader length, "ABCD" user subheader and the DESDATA.
    # NOTE(review): the raw string means '\0890' is a literal backslash
    # sequence, while the Base64 below corresponds to an embedded NUL byte
    # ("1234567\x00890") -- confirm which encoding the writer actually stores.
    des_data = "02U" + " "*166 + r'0004ABCD1234567\0890'

    ds = gdal.GetDriverByName("NITF").Create("/vsimem/nitf_DES.ntf", 1, 1, options=["DES=DES1=" + des_data, "DES=DES2=" + des_data])
    ds = None

    # DESDATA portion will be Base64 encoded on output
    # base64.b64encode(bytes("1234567\x00890", "utf-8")) == b'MTIzNDU2NwA4OTA='
    ds = gdal.Open("/vsimem/nitf_DES.ntf")
    data = ds.GetMetadata("xml:DES")[0]
    ds = None

    gdal.GetDriverByName('NITF').Delete('/vsimem/nitf_DES.ntf')

    # Both segments must round-trip with empty security fields and the
    # Base64-encoded DESDATA.
    expected_data = """<des_list>
  <des name="DES1">
    <field name="NITF_DESVER" value="02" />
    <field name="NITF_DECLAS" value="U" />
    <field name="NITF_DESCLSY" value="" />
    <field name="NITF_DESCODE" value="" />
    <field name="NITF_DESCTLH" value="" />
    <field name="NITF_DESREL" value="" />
    <field name="NITF_DESDCTP" value="" />
    <field name="NITF_DESDCDT" value="" />
    <field name="NITF_DESDCXM" value="" />
    <field name="NITF_DESDG" value="" />
    <field name="NITF_DESDGDT" value="" />
    <field name="NITF_DESCLTX" value="" />
    <field name="NITF_DESCATP" value="" />
    <field name="NITF_DESCAUT" value="" />
    <field name="NITF_DESCRSN" value="" />
    <field name="NITF_DESSRDT" value="" />
    <field name="NITF_DESCTLN" value="" />
    <field name="NITF_DESSHL" value="0004" />
    <field name="NITF_DESSHF" value="ABCD" />
    <field name="NITF_DESDATA" value="MTIzNDU2NwA4OTA=" />
  </des>
  <des name="DES2">
    <field name="NITF_DESVER" value="02" />
    <field name="NITF_DECLAS" value="U" />
    <field name="NITF_DESCLSY" value="" />
    <field name="NITF_DESCODE" value="" />
    <field name="NITF_DESCTLH" value="" />
    <field name="NITF_DESREL" value="" />
    <field name="NITF_DESDCTP" value="" />
    <field name="NITF_DESDCDT" value="" />
    <field name="NITF_DESDCXM" value="" />
    <field name="NITF_DESDG" value="" />
    <field name="NITF_DESDGDT" value="" />
    <field name="NITF_DESCLTX" value="" />
    <field name="NITF_DESCATP" value="" />
    <field name="NITF_DESCAUT" value="" />
    <field name="NITF_DESCRSN" value="" />
    <field name="NITF_DESSRDT" value="" />
    <field name="NITF_DESCTLN" value="" />
    <field name="NITF_DESSHL" value="0004" />
    <field name="NITF_DESSHF" value="ABCD" />
    <field name="NITF_DESDATA" value="MTIzNDU2NwA4OTA=" />
  </des>
</des_list>
"""

    assert data == expected_data
###############################################################################
# Test reading C4 compressed file
def test_nitf_read_C4():
    """Read a C4 (Vector Quantization) compressed RPF TOC file and
    verify the checksum of its first band."""
    dataset = gdal.Open('data/nitf/RPFTOC01.ON2')
    checksum = dataset.GetRasterBand(1).Checksum()
    assert checksum == 53599
###############################################################################
# Test reading a file with a SENSRB TRE
def test_nitf_SENSRB():
    """Read a file carrying a SENSRB TRE and verify it is fully parsed
    into the expected xml:TRE representation, including named repeated
    groups (TRANSFORM_PARAM, TIME_STAMPED_SET, TIME_STAMP_COUNTS)."""
    ds = gdal.Open('data/nitf/SENSRB_TRE.ntf')
    data = ds.GetMetadata('xml:TRE')[0]
    ds = None

    # Exact serialized form expected from the xml:TRE metadata domain.
    expected_data = """<tres>
  <tre name="SENSRB" location="image">
    <field name="GENERAL_DATA" value="Y" />
    <field name="SENSOR" value="" />
    <field name="SENSOR_URI" value="" />
    <field name="PLATFORM" value=" UMS" />
    <field name="PLATFORM_URI" value="" />
    <field name="OPERATION_DOMAIN" value="" />
    <field name="CONTENT_LEVEL" value="4" />
    <field name="GEODETIC_SYSTEM" value="" />
    <field name="GEODETIC_TYPE" value="" />
    <field name="ELEVATION_DATUM" value="" />
    <field name="LENGTH_UNIT" value=" m" />
    <field name="ANGULAR_UNIT" value="deg" />
    <field name="START_DATE" value="" />
    <field name="START_TIME" value="00000000000000" />
    <field name="END_DATE" value="20190507" />
    <field name="END_TIME" value="0000084.059869" />
    <field name="GENERATION_COUNT" value="00" />
    <field name="GENERATION_DATE" value="" />
    <field name="GENERATION_TIME" value="" />
    <field name="SENSOR_ARRAY_DATA" value="" />
    <field name="SENSOR_CALIBRATION_DATA" value="" />
    <field name="IMAGE_FORMATION_DATA" value="Y" />
    <field name="METHOD" value="" />
    <field name="MODE" value="" />
    <field name="ROW_COUNT" value="00000000" />
    <field name="COLUMN_COUNT" value="00000000" />
    <field name="ROW_SET" value="00000000" />
    <field name="COLUMN_SET" value="00000000" />
    <field name="ROW_RATE" value="0000000000" />
    <field name="COLUMN_RATE" value="0000000000" />
    <field name="FIRST_PIXEL_ROW" value="00000000" />
    <field name="FIRST_PIXEL_COLUMN" value="00000000" />
    <field name="TRANSFORM_PARAMS" value="3" />
    <repeated name="TRANSFORM_PARAM" number="3">
      <group index="0">
        <field name="TRANSFORM_PARAM" value=" 470" />
      </group>
      <group index="1">
        <field name="TRANSFORM_PARAM" value=" 471" />
      </group>
      <group index="2">
        <field name="TRANSFORM_PARAM" value=" 472" />
      </group>
    </repeated>
    <field name="REFERENCE_TIME" value="" />
    <field name="REFERENCE_ROW" value="" />
    <field name="REFERENCE_COLUMN" value="" />
    <field name="LATITUDE_OR_X" value=" 43643267" />
    <field name="LONGITUDE_OR_Y" value="" />
    <field name="ALTITUDE_OR_Z" value="" />
    <field name="SENSOR_X_OFFSET" value="00000000" />
    <field name="SENSOR_Y_OFFSET" value="00000000" />
    <field name="SENSOR_Z_OFFSET" value="00000000" />
    <field name="ATTITUDE_EULER_ANGLES" value="" />
    <field name="ATTITUDE_UNIT_VECTORS" value="" />
    <field name="ATTITUDE_QUATERNION" value="" />
    <field name="SENSOR_VELOCITY_DATA" value="" />
    <field name="POINT_SET_DATA" value="00" />
    <field name="TIME_STAMPED_DATA_SETS" value="02" />
    <repeated name="TIME_STAMPED_SET" number="2">
      <group index="0">
        <field name="TIME_STAMP_TYPE_MM" value="06b" />
        <field name="TIME_STAMP_COUNT_MM" value="0003" />
        <repeated name="TIME_STAMP_COUNTS" number="3">
          <group index="0">
            <field name="TIME_STAMP_TIME_NNNN" value="111111111111" />
            <field name="TIME_STAMP_VALUE_NNNN" value="111100001111" />
          </group>
          <group index="1">
            <field name="TIME_STAMP_TIME_NNNN" value="222222222222" />
            <field name="TIME_STAMP_VALUE_NNNN" value="222200001111" />
          </group>
          <group index="2">
            <field name="TIME_STAMP_TIME_NNNN" value="333333333333" />
            <field name="TIME_STAMP_VALUE_NNNN" value="333300001111" />
          </group>
        </repeated>
      </group>
      <group index="1">
        <field name="TIME_STAMP_TYPE_MM" value="06e" />
        <field name="TIME_STAMP_COUNT_MM" value="0002" />
        <repeated name="TIME_STAMP_COUNTS" number="2">
          <group index="0">
            <field name="TIME_STAMP_TIME_NNNN" value="444444444444" />
            <field name="TIME_STAMP_VALUE_NNNN" value="44440000" />
          </group>
          <group index="1">
            <field name="TIME_STAMP_TIME_NNNN" value="555555555555" />
            <field name="TIME_STAMP_VALUE_NNNN" value="55550000" />
          </group>
        </repeated>
      </group>
    </repeated>
    <field name="PIXEL_REFERENCED_DATA_SETS" value="00" />
    <field name="UNCERTAINTY_DATA" value="000" />
    <field name="ADDITIONAL_PARAMETER_DATA" value="000" />
  </tre>
</tres>
"""

    # On failure, show the actual serialized XML for debugging.
    assert data == expected_data, data
###############################################################################
# Verify we can read UDID metadata
def test_nitf_valid_udid():
    """UDID (user-defined image data) metadata must be exposed alongside
    the IXSHD (BLOCKA) metadata."""
    dataset = gdal.Open('data/nitf/valid_udid.ntf')
    metadata = dataset.GetMetadata()

    assert metadata['NITF_CSDIDA_YEAR'] == '2019', \
        'UDID CSDIDA metadata has unexpected value.'
    assert metadata['NITF_BLOCKA_BLOCK_INSTANCE_01'] == '01', \
        'BLOCKA metadata has unexpected value.'
###############################################################################
# Verify that bad UDID metadata doesn't prevent reading IXSHD metadata
def test_nitf_invalid_udid():
    """A malformed UDID must not prevent reading the IXSHD (BLOCKA) metadata.

    The broken CSDIDA TRE must be skipped entirely (no partial keys), while
    BLOCKA from IXSHD is still reported.
    """
    ds = gdal.Open('data/nitf/invalid_udid.ntf')
    md = ds.GetMetadata()

    # Fixed typo in the assertion message ("parings" -> "parsing").
    assert 'NITF_CSDIDA_YEAR' not in md, \
        'Unexpected parsing of UDID CSDIDA metadata.'
    assert md['NITF_BLOCKA_BLOCK_INSTANCE_01'] == '01', \
        'BLOCKA metadata has unexpected value.'
###############################################################################
# Test NITF21_CGM_ANNO_Uncompressed_unmasked.ntf for bug #1313 and #1714
def test_nitf_online_1():
    """Open NITF21_CGM_ANNO_Uncompressed_unmasked.ntf (bugs #1313 and #1714)."""
    filename = 'NITF21_CGM_ANNO_Uncompressed_unmasked.ntf'
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/bugs/' + filename, filename):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/' + filename, 1, 13123, filename_absolute=1)

    # Silence the warning about the missing image segment while opening.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    result = checker.testOpen()
    gdal.PopErrorHandler()

    return result
###############################################################################
# Test NITF file with multiple images
def test_nitf_online_2():
    """A NITF file holding several images must expose SUBDATASETS metadata."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/nitf1.1/U_0001a.ntf', 'U_0001a.ntf'):
        pytest.skip()

    dataset = gdal.Open('tmp/cache/U_0001a.ntf')
    subdatasets = dataset.GetMetadata('SUBDATASETS')
    assert 'SUBDATASET_1_NAME' in subdatasets, 'missing SUBDATASET_1_NAME metadata'
    dataset = None
###############################################################################
# Test ARIDPCM (C2) image
def test_nitf_online_3():
    """Checksum the third image segment, which is ARIDPCM (C2) compressed."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/nitf1.1/U_0001a.ntf', 'U_0001a.ntf'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'NITF_IM:3:tmp/cache/U_0001a.ntf', 1, 23463, filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test Vector Quantization (VQ) (C4) file
def test_nitf_online_4():
    """Read a Vector Quantization (VQ / C4) CADRG file, checking both the
    RPF attribute metadata and the image checksum."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/cadrg/001zc013.on1', '001zc013.on1'):
        pytest.skip()

    # The RPF attribute metadata must be carried through (#3413).
    dataset = gdal.Open('tmp/cache/001zc013.on1')
    metadata = dataset.GetMetadata()
    assert metadata['NITF_RPF_CurrencyDate'] == '19950720' and metadata['NITF_RPF_ProductionDate'] == '19950720' and metadata['NITF_RPF_SignificantDate'] == '19890629', \
        'RPF attribute metadata not captured (#3413)'
    dataset = None

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/001zc013.on1', 1, 53960, filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test Vector Quantization (VQ) (M4) file
def test_nitf_online_5():
    """Checksum a Vector Quantization (VQ / M4) compressed overview file."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/cadrg/overview.ovr', 'overview.ovr'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/overview.ovr', 1, 60699, filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test a JPEG compressed, single blocked 2048x2048 mono image
def test_nitf_online_6():
    """Checksum a JPEG compressed, single-block 2048x2048 mono image."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/nitf2.0/U_4001b.ntf', 'U_4001b.ntf'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/U_4001b.ntf', 1, 60030, filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test all combinations of IMODE (S,P,B,R) for an image with 6 bands whose 3 are RGB
def test_nitf_online_7():
    """Test all combinations of IMODE (S,P,B,R) for an image with 6 bands
    whose 3 are RGB.

    Each of the four files encodes the same 6-band image with a different
    interleaving; checksums and color interpretations must match regardless.
    """
    # Expected per-band values, shared by all four IMODE variants;
    # hoisted out of the file loop since they are invariant.
    expected_checksums = [48385, 48385, 40551, 54223, 48385, 33094]
    expected_interps = [gdal.GCI_Undefined, gdal.GCI_Undefined, gdal.GCI_RedBand,
                        gdal.GCI_BlueBand, gdal.GCI_Undefined, gdal.GCI_GreenBand]

    for filename in ['ns3228b.nsf', 'i_3228c.ntf', 'ns3228d.nsf', 'i_3228e.ntf']:
        if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/' + filename, filename):
            pytest.skip()

        ds = gdal.Open('tmp/cache/' + filename)
        assert ds.RasterCount == 6

        # Iterate bands idiomatically instead of indexing with range(len()).
        for band_no, (expected_cs, expected_interp) in enumerate(
                zip(expected_checksums, expected_interps), start=1):
            band = ds.GetRasterBand(band_no)
            cs = band.Checksum()
            assert cs == expected_cs, ('got checksum %d for image %s'
                                       % (cs, filename))
            assert band.GetRasterColorInterpretation() == expected_interp, \
                ('got wrong color interp for image %s'
                 % filename)
        ds = None
###############################################################################
# Test JPEG-compressed multi-block mono-band image with a data mask subheader (IC=M3, IMODE=B)
def test_nitf_online_8():
    """JPEG-compressed multi-block mono-band image with a data mask
    subheader (IC=M3, IMODE=B)."""
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/ns3301j.nsf', 'ns3301j.nsf'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/ns3301j.nsf', 1, 56861, filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test JPEG-compressed multi-block mono-band image without a data mask subheader (IC=C3, IMODE=B)
def test_nitf_online_9():
    """JPEG-compressed multi-block mono-band image without a data mask
    subheader (IC=C3, IMODE=B)."""
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/ns3304a.nsf', 'ns3304a.nsf'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/ns3304a.nsf', 1, 32419, filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Verify that CGM access on a file with 8 CGM segments
def test_nitf_online_10():
    """Verify CGM access on a file with 8 CGM segments.

    Checks segment count plus the location/level metadata of the first and
    last segments (and the SLOC of those in between).
    """
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/ns3119b.nsf', 'ns3119b.nsf'):
        pytest.skip()

    # Shut up the warning about missing image segment
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds = gdal.Open('tmp/cache/ns3119b.nsf')
    gdal.PopErrorHandler()

    mdCGM = ds.GetMetadata('CGM')
    ds = None

    assert mdCGM['SEGMENT_COUNT'] == '8', 'wrong SEGMENT_COUNT.'

    tab = [
        ('SEGMENT_0_SLOC_ROW', '0'),
        ('SEGMENT_0_SLOC_COL', '0'),
        # Fixed: the first of these two checks duplicated SEGMENT_0_CCS_COL;
        # it should check the row, mirroring the SEGMENT_7 pair below.
        ('SEGMENT_0_CCS_ROW', '0'),
        ('SEGMENT_0_CCS_COL', '0'),
        ('SEGMENT_0_SDLVL', '1'),
        ('SEGMENT_0_SALVL', '0'),
        ('SEGMENT_1_SLOC_ROW', '0'),
        ('SEGMENT_1_SLOC_COL', '684'),
        ('SEGMENT_2_SLOC_ROW', '0'),
        ('SEGMENT_2_SLOC_COL', '1364'),
        ('SEGMENT_3_SLOC_ROW', '270'),
        ('SEGMENT_3_SLOC_COL', '0'),
        ('SEGMENT_4_SLOC_ROW', '270'),
        ('SEGMENT_4_SLOC_COL', '684'),
        ('SEGMENT_5_SLOC_ROW', '270'),
        ('SEGMENT_5_SLOC_COL', '1364'),
        ('SEGMENT_6_SLOC_ROW', '540'),
        ('SEGMENT_6_SLOC_COL', '0'),
        ('SEGMENT_7_SLOC_ROW', '540'),
        ('SEGMENT_7_SLOC_COL', '1364'),
        ('SEGMENT_7_CCS_ROW', '540'),
        ('SEGMENT_7_CCS_COL', '1364'),
        ('SEGMENT_7_SDLVL', '8'),
        ('SEGMENT_7_SALVL', '0'),
    ]
    for key, expected in tab:
        assert mdCGM[key] == expected, ('wrong value for %s.' % key)
###############################################################################
# 5 text files
def test_nitf_online_11():
    """Read the five text segments of U_1122a.ntf from the TEXT domain."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/nitf2.0/U_1122a.ntf', 'U_1122a.ntf'):
        pytest.skip()

    ds = gdal.Open('tmp/cache/U_1122a.ntf')
    text_md = ds.GetMetadata('TEXT')
    ds = None

    # DATA_0..DATA_4 hold "This is test text file 01." .. "05." respectively.
    for idx in range(5):
        expected = 'This is test text file %02d.\r\n' % (idx + 1)
        assert text_md['DATA_%d' % idx] == expected, \
            'did not find expected DATA_%d from metadata.' % idx
###############################################################################
# Test 12 bit uncompressed image.
def test_nitf_online_12():
    """Checksum a 12-bit uncompressed image."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/bugs/i_3430a.ntf', 'i_3430a.ntf'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/i_3430a.ntf', 1, 38647,
                                filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test complex relative graphic/image attachment.
def test_nitf_online_13():
    """Test complex relative graphic/image attachment: the CGM segment and
    the second image must resolve to consistent common coordinate system
    (CCS) positions."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/u_3054a.ntf', 'u_3054a.ntf'):
        pytest.skip()

    # Shut up the warning about missing image segment
    ds = gdal.Open('NITF_IM:2:tmp/cache/u_3054a.ntf')
    mdCGM = ds.GetMetadata('CGM')
    md = ds.GetMetadata()
    ds = None

    assert mdCGM['SEGMENT_COUNT'] == '3', 'wrong SEGMENT_COUNT.'

    tab = [
        ('SEGMENT_2_SLOC_ROW', '0'),
        ('SEGMENT_2_SLOC_COL', '0'),
        # Fixed: this pair duplicated SEGMENT_2_CCS_COL; the first entry
        # should check the row (both resolve to 1100 in the CCS).
        ('SEGMENT_2_CCS_ROW', '1100'),
        ('SEGMENT_2_CCS_COL', '1100'),
        ('SEGMENT_2_SDLVL', '6'),
        ('SEGMENT_2_SALVL', '3')
    ]
    for item in tab:
        assert mdCGM[item[0]] == item[1], ('wrong value for %s.' % item[0])

    tab = [
        ('NITF_IDLVL', '3'),
        ('NITF_IALVL', '1'),
        ('NITF_ILOC_ROW', '1100'),
        ('NITF_ILOC_COLUMN', '1100'),
        ('NITF_CCS_ROW', '1100'),
        ('NITF_CCS_COLUMN', '1100'),
    ]
    for item in tab:
        assert md[item[0]] == item[1], ('wrong value for %s, got %s instead of %s.'
                                        % (item[0], md[item[0]], item[1]))
###############################################################################
# Check reading a 12-bit JPEG compressed NITF (multi-block)
def test_nitf_online_14(not_jpeg_9b):
    """Check reading a 12-bit JPEG compressed NITF (multi-block)."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/nitf2.0/U_4020h.ntf', 'U_4020h.ntf'):
        pytest.skip()

    # Remove any stale statistics sidecar so GetStatistics() recomputes.
    aux_xml = 'tmp/cache/U_4020h.ntf.aux.xml'
    try:
        os.remove(aux_xml)
    except OSError:
        pass

    # Only meaningful when the JPEG driver supports 12-bit reading/writing.
    jpeg_md = gdal.GetDriverByName('JPEG').GetMetadata()
    if jpeg_md[gdal.DMD_CREATIONDATATYPES].find('UInt16') == -1:
        pytest.skip('12bit jpeg not available')

    ds = gdal.Open('tmp/cache/U_4020h.ntf')
    assert ds.GetRasterBand(1).DataType == gdal.GDT_UInt16
    band_stats = ds.GetRasterBand(1).GetStatistics(0, 1)
    assert band_stats[2] >= 2607 and band_stats[2] <= 2608
    ds = None

    # Clean up the sidecar produced by the statistics computation.
    try:
        os.remove(aux_xml)
    except OSError:
        pass
###############################################################################
# Test opening a IC=C8 NITF file with the various JPEG2000 drivers
def nitf_online_15(driver_to_test, expected_cs=1054):
    """Open an IC=C8 (JPEG2000) NITF with the given JPEG2000 driver and
    compare the band 1 checksum against expected_cs.

    Returns 'success' or 'fail' (legacy gdaltest helper convention).
    """
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Jpeg2000/p0_01/p0_01a.ntf', 'p0_01a.ntf'):
        pytest.skip()
    if gdal.GetDriverByName(driver_to_test) is None:
        pytest.skip()

    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but(driver_to_test)

    ds = gdal.Open('tmp/cache/p0_01a.ntf')
    got_cs = ds.GetRasterBand(1).Checksum()
    if got_cs == expected_cs:
        ret = 'success'
    else:
        print(got_cs)
        gdaltest.post_reason('Did not get expected checksums')
        ret = 'fail'

    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
# Per-driver entry points delegating to the shared nitf_online_15() helper.
def test_nitf_online_15_jp2ecw():
    return nitf_online_15('JP2ECW')


def test_nitf_online_15_jp2mrsid():
    return nitf_online_15('JP2MrSID')


def test_nitf_online_15_jp2kak():
    return nitf_online_15('JP2KAK')


def test_nitf_online_15_jasper():
    return nitf_online_15('JPEG2000')


def test_nitf_online_15_openjpeg():
    return nitf_online_15('JP2OpenJPEG')
###############################################################################
# Test opening a IC=C8 NITF file which has 256-entry palette/LUT in both JP2 Header and image Subheader
# We expect RGB expansion from some JPEG2000 driver
def nitf_online_16(driver_to_test):
    """Open an IC=C8 NITF holding a 256-entry palette/LUT in both the JP2
    header and the image subheader.

    Some JPEG2000 drivers expand the palette to RGB, others keep the single
    paletted band; both outcomes are accepted. Returns 'success' or 'fail'.
    """
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Jpeg2000/jp2_09/file9_jp2_2places.ntf', 'file9_jp2_2places.ntf'):
        pytest.skip()
    if gdal.GetDriverByName(driver_to_test) is None:
        pytest.skip()

    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but(driver_to_test)

    ds = gdal.Open('tmp/cache/file9_jp2_2places.ntf')

    expanded_to_rgb = (ds.RasterCount == 3 and
                       ds.GetRasterBand(1).Checksum() == 48954 and
                       ds.GetRasterBand(2).Checksum() == 4939 and
                       ds.GetRasterBand(3).Checksum() == 17734)
    kept_palette = (ds.RasterCount == 1 and
                    ds.GetRasterBand(1).Checksum() == 47664 and
                    ds.GetRasterBand(1).GetRasterColorTable() is not None)

    if expanded_to_rgb or kept_palette:
        ret = 'success'
    else:
        print(ds.RasterCount)
        for i in range(ds.RasterCount):
            print(ds.GetRasterBand(i + 1).Checksum())
        print(ds.GetRasterBand(1).GetRasterColorTable())
        gdaltest.post_reason('Did not get expected checksums')
        ret = 'fail'

    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
# Per-driver entry points delegating to the shared nitf_online_16() helper.
def test_nitf_online_16_jp2ecw():
    return nitf_online_16('JP2ECW')


def test_nitf_online_16_jp2mrsid():
    return nitf_online_16('JP2MrSID')


def test_nitf_online_16_jp2kak():
    return nitf_online_16('JP2KAK')


def test_nitf_online_16_jasper():
    return nitf_online_16('JPEG2000')


def test_nitf_online_16_openjpeg():
    return nitf_online_16('JP2OpenJPEG')
###############################################################################
# Test opening a IC=C8 NITF file which has 256-entry/LUT in Image Subheader, JP2 header completely removed
# We don't expect RGB expansion from the JPEG2000 driver
def nitf_online_17(driver_to_test):
    """Open an IC=C8 NITF whose JP2 header was completely removed, keeping
    only the 256-entry LUT in the image subheader.

    No RGB expansion from the JPEG2000 driver is expected: the dataset must
    stay a single paletted band. Returns 'success' or 'fail'.
    """
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Jpeg2000/jp2_09/file9_j2c.ntf', 'file9_j2c.ntf'):
        pytest.skip()
    if gdal.GetDriverByName(driver_to_test) is None:
        pytest.skip()

    # Deregister other potential conflicting JPEG2000 drivers
    gdaltest.deregister_all_jpeg2000_drivers_but(driver_to_test)

    ds = gdal.Open('tmp/cache/file9_j2c.ntf')
    paletted_ok = (ds.RasterCount == 1 and
                   ds.GetRasterBand(1).Checksum() == 47664 and
                   ds.GetRasterBand(1).GetRasterColorTable() is not None)

    if paletted_ok:
        ret = 'success'
    else:
        print(ds.RasterCount)
        for i in range(ds.RasterCount):
            print(ds.GetRasterBand(i + 1).Checksum())
        print(ds.GetRasterBand(1).GetRasterColorTable())
        gdaltest.post_reason('Did not get expected checksums')
        ret = 'fail'

    gdaltest.reregister_all_jpeg2000_drivers()
    return ret
# Per-driver entry points delegating to the shared nitf_online_17() helper.
def test_nitf_online_17_jp2ecw():
    return nitf_online_17('JP2ECW')


def test_nitf_online_17_jp2mrsid():
    return nitf_online_17('JP2MrSID')


def test_nitf_online_17_jp2kak():
    return nitf_online_17('JP2KAK')


def test_nitf_online_17_jasper():
    return nitf_online_17('JPEG2000')


def test_nitf_online_17_openjpeg():
    return nitf_online_17('JP2OpenJPEG')
###############################################################################
# Test polar stereographic CADRG tile.
def test_nitf_online_18():
    """Read a polar stereographic CADRG tile (bug #3337): projection,
    geotransform and GCPs must be reported consistently."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/bugs/bug3337.ntf', 'bug3337.ntf'):
        pytest.skip()

    ds = gdal.Open('tmp/cache/bug3337.ntf')
    geotransform = ds.GetGeoTransform()
    projection = ds.GetProjection()

    if projection[:6] == 'PROJCS':
        # A functioning coordinate transformer is available.
        assert projection.find('Azimuthal_Equidistant') != -1, 'wrong projection?'
        expected_gt = (-1669792.3618991028, 724.73626818537502, 0.0, -556597.45396636717, 0.0, -724.73626818537434)
        assert gdaltest.geotransform_equals(geotransform, expected_gt, 1.0), \
            'did not get expected geotransform.'
    else:
        # No functioning coordinate transformer: expect identity gt, no SRS.
        assert projection == '' and gdaltest.geotransform_equals(geotransform, (0, 1, 0, 0, 0, 1), 0.00000001), \
            'did not get expected empty gt/projection'

    gcp_srs = ds.GetGCPProjection()
    assert gcp_srs[:6] == 'GEOGCS', 'did not get expected geographic srs'

    gcp = ds.GetGCPs()[3]
    assert gcp.GCPPixel == 0 and gcp.GCPLine == 1536 and abs(gcp.GCPX + 45) <= 0.0000000001 and gcp.GCPY == pytest.approx(68.78679656, abs=0.00000001), \
        'did not get expected gcp.'

    ds = None
###############################################################################
# Test CADRG tile crossing dateline (#3383)
def test_nitf_online_19():
    """CADRG tile crossing the dateline (#3383): checksum and geotransform."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/0000M033.GN3', '0000M033.GN3'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/0000M033.GN3', 1, 38928,
                                filename_absolute=1)
    return checker.testOpen(check_gt=(174.375000000000000, 0.010986328125000, 0,
                                      51.923076923076927, 0, -0.006760817307692))
###############################################################################
# Check that the RPF attribute metadata was carried through.
# Special case where the reported size of the attribute subsection is
# smaller than really available
def test_nitf_online_20():
    """RPF attribute metadata must be carried through even in the special
    case where the reported size of the attribute subsection is smaller
    than what is really available."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/0000M033.GN3', '0000M033.GN3'):
        pytest.skip()

    dataset = gdal.Open('tmp/cache/0000M033.GN3')
    metadata = dataset.GetMetadata()
    assert metadata['NITF_RPF_CurrencyDate'] == '19941201' and metadata['NITF_RPF_ProductionDate'] == '19980511' and metadata['NITF_RPF_SignificantDate'] == '19850305', \
        'RPF attribute metadata not captured (#3413)'
###############################################################################
# Check that we can read NITF header located in STREAMING_FILE_HEADER DE
# segment when header at beginning of file is incomplete
def test_nitf_online_21():
    """The NITF header located in a STREAMING_FILE_HEADER DE segment must be
    used when the header at the beginning of the file is incomplete."""
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv2_1/ns3321a.nsf', 'ns3321a.nsf'):
        pytest.skip()

    dataset = gdal.Open('tmp/cache/ns3321a.nsf')
    metadata = dataset.GetMetadata()
    dataset = None

    # Getting 'NS3321A' would mean the header from the STREAMING_FILE_HEADER
    # DE segment was not exploited.
    assert metadata['NITF_OSTAID'] == 'I_3321A', \
        'did not get expected OSTAID value'
###############################################################################
# Test fix for #3002 (reconcile NITF file with LA segments)
#
def _check_nitf_online_22_md(im_index, expected_items):
    """Open image subdataset *im_index* of U_0001C.NTF and verify metadata items.

    expected_items is a list of (metadata key, expected string value) pairs.
    """
    ds = gdal.Open('NITF_IM:%d:tmp/cache/U_0001C.NTF' % im_index)
    md = ds.GetMetadata()
    ds = None
    for key, expected in expected_items:
        assert md[key] == expected, ('(%d) wrong value for %s, got %s instead of %s.'
                                     % (im_index, key, md[key], expected))


def test_nitf_online_22():
    """Test fix for #3002 (reconcile NITF file with LA segments).

    The four image subdatasets carry different display-level / attachment-level
    combinations; verify the computed ILOC/CCS placement for each.
    """
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/software/testfile/Nitfv1_1/U_0001C.NTF', 'U_0001C.NTF'):
        pytest.skip()

    expected_per_image = [
        (1, [('NITF_IDLVL', '6'),
             ('NITF_IALVL', '1'),
             ('NITF_ILOC_ROW', '360'),
             ('NITF_ILOC_COLUMN', '380'),
             ('NITF_CCS_ROW', '425'),
             ('NITF_CCS_COLUMN', '410')]),
        (2, [('NITF_IDLVL', '11'),
             ('NITF_IALVL', '2'),
             ('NITF_ILOC_ROW', '360'),
             ('NITF_ILOC_COLUMN', '40'),
             ('NITF_CCS_ROW', '422'),
             ('NITF_CCS_COLUMN', '210')]),
        (3, [('NITF_IDLVL', '5'),
             ('NITF_IALVL', '3'),
             ('NITF_ILOC_ROW', '40'),
             ('NITF_ILOC_COLUMN', '240'),
             ('NITF_CCS_ROW', '-1'),
             ('NITF_CCS_COLUMN', '-1')]),
        (4, [('NITF_IDLVL', '1'),
             ('NITF_IALVL', '0'),
             ('NITF_ILOC_ROW', '65'),
             ('NITF_ILOC_COLUMN', '30'),
             ('NITF_CCS_ROW', '65'),
             ('NITF_CCS_COLUMN', '30')]),
    ]
    for im_index, items in expected_per_image:
        _check_nitf_online_22_md(im_index, items)
###############################################################################
# Test reading a M4 compressed file (fixed for #3848)
def test_nitf_online_23():
    """Read a M4 compressed file (fixed for #3848)."""
    if not gdaltest.download_file('http://download.osgeo.org/gdal/data/nitf/nitf2.0/U_3058b.ntf', 'U_3058b.ntf'):
        pytest.skip()

    checker = gdaltest.GDALTest('NITF', 'tmp/cache/U_3058b.ntf', 1, 44748,
                                filename_absolute=1)
    return checker.testOpen()
###############################################################################
# Test reading ECRG frames
def test_nitf_online_24():
    """Read an ECRG frame and verify the expected TREs are reported."""
    if not gdaltest.download_file('http://www.falconview.org/trac/FalconView/downloads/17', 'ECRG_Sample.zip'):
        pytest.skip()

    try:
        os.stat('tmp/cache/ECRG_Sample.zip')
    except OSError:
        pytest.skip()

    oldval = gdal.GetConfigOption('NITF_OPEN_UNDERLYING_DS')
    gdal.SetConfigOption('NITF_OPEN_UNDERLYING_DS', 'NO')
    try:
        ds = gdal.Open('/vsizip/tmp/cache/ECRG_Sample.zip/ECRG_Sample/EPF/clfc/2/000000009s0013.lf2')
    finally:
        # Restore the previous configuration even if the open raises,
        # so a failure here cannot leak state into later tests.
        gdal.SetConfigOption('NITF_OPEN_UNDERLYING_DS', oldval)
    assert ds is not None
    xml_tre = ds.GetMetadata('xml:TRE')[0]
    ds = None

    # Every one of these TREs must be present in the xml:TRE report.
    for tre_name in ('GEOPSB', 'J2KLRA', 'GEOLOB', 'BNDPLB', 'ACCPOB', 'SOURCB'):
        assert xml_tre.find('<tre name="%s"' % tre_name) != -1, 'did not get expected xml:TRE'
###############################################################################
# Test reading a HRE file
def test_nitf_online_25():
    """Read a HRE file and check that the PIAPRD TRE is reported."""
    basename = 'Case1_HRE10G324642N1170747W_Uxx.hr5'
    if not gdaltest.download_file('http://www.gwg.nga.mil/ntb/baseline/docs/HRE_spec/' + basename, basename):
        pytest.skip()

    local_path = 'tmp/cache/' + basename
    checker = gdaltest.GDALTest('NITF', local_path, 1, 7099, filename_absolute=1)
    checker.testOpen()

    ds = gdal.Open(local_path)
    xml_tre = ds.GetMetadata('xml:TRE')[0]
    ds = None

    assert xml_tre.find('<tre name="PIAPRD"') != -1, 'did not get expected xml:TRE'
###############################################################################
# Cleanup.
def _cleanup_nitf_file(path, pre_remove=None, post_unlink=None):
    """Best-effort removal of one temporary NITF output.

    path : the .ntf file to delete through the NITF driver.
    pre_remove : optional sidecar file to os.remove() *before* the driver delete
        (e.g. a .hdr written alongside the dataset).
    post_unlink : optional external file to os.unlink() *after* the driver delete
        (e.g. a .ovr overview).

    RuntimeError (driver delete failure) and OSError (missing sidecar) are
    swallowed: cleanup must never fail the test run.
    """
    try:
        if pre_remove is not None:
            os.remove(pre_remove)
        gdal.GetDriverByName('NITF').Delete(path)
        if post_unlink is not None:
            os.unlink(post_unlink)
    except (RuntimeError, OSError):
        pass


def test_nitf_cleanup():
    """Remove every temporary file created by the tests above (best effort)."""
    # Datasets with no sidecar files.
    plain = ['test_create', 'nitf9', 'test_13', 'test_29', 'test_29_copy',
             'nitf36', 'nitf37', 'nitf39', 'nitf40', 'nitf42', 'nitf44',
             'nitf49', 'nitf49_2', 'nitf50', 'nitf51', 'nitf52', 'nitf53',
             'nitf54', 'nitf55', 'nitf56', 'nitf57', 'nitf58', 'nitf62',
             'nitf63']
    for basename in plain:
        _cleanup_nitf_file('tmp/%s.ntf' % basename)

    # Datasets that also produced an external .ovr overview file.
    for basename in ['nitf38', 'nitf45', 'nitf46']:
        _cleanup_nitf_file('tmp/%s.ntf' % basename,
                           post_unlink='tmp/%s.ntf_0.ovr' % basename)

    # nitf59 wrote a sidecar .hdr that must be removed before the dataset.
    _cleanup_nitf_file('tmp/nitf59.ntf', pre_remove='tmp/nitf59.hdr')
|
/**
 * @ngdoc controller
 * @name Umbraco.NavigationController
 * @function
 *
 * @description
 * Handles the section (navigation) area of the app: wires the main tree,
 * the context menu, the search panel and the language selector together
 * through appState/eventsService.
 *
 * @param {navigationService} navigationService A reference to the navigationService
 */
function NavigationController($scope, $rootScope, $location, $log, $q, $routeParams, $timeout, $cookies, treeService, appState, navigationService, keyboardService, historyService, eventsService, angularHelper, languageResource, contentTypeResource, editorState) {
    //this is used to trigger the tree to start loading once everything is ready
    //(resolved at the end of initNav with the section/params/cacheKey to load with)
    var treeInitPromise = $q.defer();
    $scope.treeApi = {};
    //Bind to the main tree events
    $scope.onTreeInit = function () {
        $scope.treeApi.callbacks.treeNodeExpanded(nodeExpandedHandler);
        //when a tree is loaded into a section, we need to put it into appState
        $scope.treeApi.callbacks.treeLoaded(function (args) {
            appState.setTreeState("currentRootNode", args.tree);
        });
        //when a tree node is synced this event will fire, this allows us to set the currentNode
        $scope.treeApi.callbacks.treeSynced(function (args) {
            if (args.activate === undefined || args.activate === true) {
                //set the current selected node
                appState.setTreeState("selectedNode", args.node);
                //when a node is activated, this is the same as clicking it and we need to set the
                //current menu item to be this node as well.
                //appState.setMenuState("currentNode", args.node);// Niels: No, we are setting it from the dialog.
            }
        });
        //this reacts to the options item in the tree
        $scope.treeApi.callbacks.treeOptionsClick(function (args) {
            args.event.stopPropagation();
            args.event.preventDefault();
            //Set the current action node (this is not the same as the current selected node!)
            //appState.setMenuState("currentNode", args.node);// Niels: No, we are setting it from the dialog.
            if (args.event && args.event.altKey) {
                args.skipDefault = true;
            }
            navigationService.showMenu(args);
        });
        $scope.treeApi.callbacks.treeNodeAltSelect(function (args) {
            args.event.stopPropagation();
            args.event.preventDefault();
            args.skipDefault = true;
            navigationService.showMenu(args);
        });
        //this reacts to tree items themselves being clicked
        //the tree directive should not contain any handling, simply just bubble events
        $scope.treeApi.callbacks.treeNodeSelect(function (args) {
            var n = args.node;
            args.event.stopPropagation();
            args.event.preventDefault();
            if (n.metaData && n.metaData["jsClickCallback"] && Utilities.isString(n.metaData["jsClickCallback"]) && n.metaData["jsClickCallback"] !== "") {
                //this is a legacy tree node!
                var jsPrefix = "javascript:";
                var js;
                if (n.metaData["jsClickCallback"].startsWith(jsPrefix)) {
                    js = n.metaData["jsClickCallback"].substr(jsPrefix.length);
                }
                else {
                    js = n.metaData["jsClickCallback"];
                }
                try {
                    //SECURITY NOTE(review): eval of a callback string supplied in tree metadata;
                    //presumably only trusted server-configured legacy trees reach here - confirm.
                    var func = eval(js);
                    //this is normally not necessary since the eval above should execute the method and will return nothing.
                    if (func != null && (typeof func === "function")) {
                        func.call();
                    }
                }
                catch (ex) {
                    $log.error("Error evaluating js callback from legacy tree node: " + ex);
                }
            }
            else if (n.routePath) {
                //add action to the history service
                historyService.add({ name: n.name, link: n.routePath, icon: n.icon });
                //put this node into the tree state
                appState.setTreeState("selectedNode", args.node);
                //when a node is clicked we also need to set the active menu node to this node
                //appState.setMenuState("currentNode", args.node);
                //not legacy, lets just set the route value and clear the query string if there is one.
                $location.path(n.routePath);
                navigationService.clearSearch();
            }
            else if (n.section) {
                $location.path(n.section);
                navigationService.clearSearch();
            }
            navigationService.hideNavigation();
        });
        return treeInitPromise.promise;
    }
    //set up our scope vars
    $scope.showContextMenuDialog = false;
    $scope.showContextMenu = false;
    $scope.showSearchResults = false;
    $scope.menuDialogTitle = null;
    $scope.menuActions = [];
    $scope.menuNode = null;
    $scope.languages = [];
    $scope.selectedLanguage = {};
    $scope.page = {};
    $scope.page.languageSelectorIsOpen = false;
    $scope.currentSection = null;
    $scope.customTreeParams = null;
    $scope.treeCacheKey = "_";
    $scope.showNavigation = appState.getGlobalState("showNavigation");
    // tracks all expanded paths so when the language is switched we can resync it with the already loaded paths
    var expandedPaths = [];
    //trigger search with a hotkey:
    keyboardService.bind("ctrl+shift+s", function () {
        navigationService.showSearch();
    });
    //// TODO: remove this it's not a thing
    //$scope.selectedId = navigationService.currentId;
    var isInit = false;
    var evts = [];
    //Listen for global state changes
    evts.push(eventsService.on("appState.globalState.changed", function (e, args) {
        if (args.key === "showNavigation") {
            $scope.showNavigation = args.value;
        }
    }));
    //Listen for menu state changes
    evts.push(eventsService.on("appState.menuState.changed", function (e, args) {
        if (args.key === "showMenuDialog") {
            $scope.showContextMenuDialog = args.value;
        }
        if (args.key === "dialogTemplateUrl") {
            $scope.dialogTemplateUrl = args.value;
        }
        if (args.key === "showMenu") {
            $scope.showContextMenu = args.value;
        }
        if (args.key === "dialogTitle") {
            $scope.menuDialogTitle = args.value;
        }
        if (args.key === "menuActions") {
            $scope.menuActions = args.value;
        }
        if (args.key === "currentNode") {
            $scope.menuNode = args.value;
        }
    }));
    //Listen for tree state changes
    evts.push(eventsService.on("appState.treeState.changed", function (e, args) {
        if (args.key === "currentRootNode") {
            //if the changed state is the currentRootNode, determine if this is a full screen app
            if (args.value.root && args.value.root.containsTrees === false) {
                $rootScope.emptySection = true;
            }
            else {
                $rootScope.emptySection = false;
            }
        }
    }));
    //Listen for section state changes
    evts.push(eventsService.on("appState.sectionState.changed", function (e, args) {
        //section changed
        if (args.key === "currentSection" && $scope.currentSection != args.value) {
            //before loading the main tree we need to ensure that the nav is ready
            navigationService.waitForNavReady().then(() => {
                $scope.currentSection = args.value;
                //load the tree
                configureTreeAndLanguages();
                $scope.treeApi.load({ section: $scope.currentSection, customTreeParams: $scope.customTreeParams, cacheKey: $scope.treeCacheKey });
            });
        }
        //show/hide search results
        if (args.key === "showSearchResults") {
            $scope.showSearchResults = args.value;
        }
    }));
    // Listen for language updates
    evts.push(eventsService.on("editors.languages.languageDeleted", function (e, args) {
        loadLanguages().then(function (languages) {
            $scope.languages = languages;
            const defaultCulture = $scope.languages[0].culture;
            if (args.language.culture === $scope.selectedLanguage.culture) {
                //NOTE(review): this assigns a culture *string* to selectedLanguage, which is
                //treated as an object (selectedLanguage.culture) everywhere else - verify.
                $scope.selectedLanguage = defaultCulture;
                if ($scope.languages.length > 1) {
                    $location.search("mculture", defaultCulture);
                } else {
                    $location.search("mculture", null);
                }
                var currentEditorState = editorState.getCurrent();
                if (currentEditorState && currentEditorState.path) {
                    $scope.treeApi.syncTree({ path: currentEditorState.path, activate: true });
                }
            }
        });
    }));
    //Emitted when a language is created or an existing one saved/edited
    evts.push(eventsService.on("editors.languages.languageSaved", function (e, args) {
        if (args.isNew) {
            //A new language has been created - reload languages for tree
            loadLanguages().then(function (languages) {
                $scope.languages = languages;
            });
        }
        else if (args.language.isDefault) {
            //A language was saved and was set to be the new default (refresh the tree, so its at the top)
            loadLanguages().then(function (languages) {
                $scope.languages = languages;
            });
        }
    }));
    //when a user logs out or timesout
    evts.push(eventsService.on("app.notAuthenticated", function () {
        $scope.authenticated = false;
    }));
    //when the application is ready and the user is authorized, setup the data
    //this will occur anytime a new user logs in!
    evts.push(eventsService.on("app.ready", function (evt, data) {
        $scope.authenticated = true;
        ensureInit();
        ensureMainCulture();
    }));
    // event for infinite editors
    evts.push(eventsService.on("appState.editors.open", function (name, args) {
        $scope.infiniteMode = args && args.editors.length > 0 ? true : false;
    }));
    evts.push(eventsService.on("appState.editors.close", function (name, args) {
        $scope.infiniteMode = args && args.editors.length > 0 ? true : false;
    }));
    evts.push(eventsService.on("treeService.removeNode", function (e, args) {
        //check to see if the current page has been removed
        var currentEditorState = editorState.getCurrent();
        if (currentEditorState && currentEditorState.id.toString() === args.node.id.toString()) {
            //current page is loaded, so navigate to root
            var section = appState.getSectionState("currentSection");
            $location.path("/" + section);
        }
    }));
    /**
     * For multi language sites, this ensures that mculture is set to either the last selected language or the default one
     */
    function ensureMainCulture() {
        if ($location.search().mculture) {
            return;
        }
        var language = lastLanguageOrDefault();
        if (!language) {
            return;
        }
        // trigger a language selection in the next digest cycle
        $timeout(function () {
            $scope.selectLanguage(language);
        });
    }
    /**
     * Based on the current state of the application, this configures the scope variables that control the main tree and language drop down
     */
    function configureTreeAndLanguages() {
        //create the custom query string param for this tree, this is currently only relevant for content
        if ($scope.currentSection === "content") {
            //must use $location here because $routeParams isn't available until after the route change
            var mainCulture = $location.search().mculture;
            //select the current language if set in the query string
            if (mainCulture && $scope.languages && $scope.languages.length > 1) {
                var found = _.find($scope.languages, function (l) {
                    if (mainCulture === true) {
                        return false;
                    }
                    return l.culture.toLowerCase() === mainCulture.toLowerCase();
                });
                if (found) {
                    //set the route param
                    found.active = true;
                    $scope.selectedLanguage = found;
                }
            }
            var queryParams = {};
            if ($scope.selectedLanguage && $scope.selectedLanguage.culture) {
                queryParams["culture"] = $scope.selectedLanguage.culture;
                if (!mainCulture) {
                    $location.search("mculture", $scope.selectedLanguage.culture);
                }
            }
            var queryString = $.param(queryParams); //create the query string from the params object
        }
        //NOTE: queryString is declared inside the if-block above but is function-scoped
        //(var hoisting), so it is undefined when the section is not "content" and we
        //fall through to the default cache key below.
        if (queryString) {
            $scope.customTreeParams = queryString;
            $scope.treeCacheKey = queryString; // this tree uses caching but we need to change it's cache key per lang
        }
        else {
            $scope.treeCacheKey = "_"; // this tree uses caching, there's no lang selected so use the default
        }
    }
    /**
     * Called when the app is ready and sets up the navigation (should only be called once)
     */
    function ensureInit() {
        //only run once ever!
        if (isInit) {
            return;
        }
        isInit = true;
        var navInit = false;
        //$routeParams will be populated after $routeChangeSuccess since this controller is used outside ng-view,
        //* we listen for the first route change with a section to setup the navigation.
        //* we listen for all route changes to track the current section.
        $rootScope.$on('$routeChangeSuccess', function () {
            //only continue if there's a section available
            if ($routeParams.section) {
                if (!navInit) {
                    navInit = true;
                    initNav();
                }
                //keep track of the current section when it changes
                if ($scope.currentSection != $routeParams.section) {
                    appState.setSectionState("currentSection", $routeParams.section);
                }
            }
        });
    }
    /**
     * This loads the language data, if the are no variant content types configured this will return no languages
     */
    function loadLanguages() {
        return contentTypeResource.allowsCultureVariation().then(function (b) {
            if (b === true) {
                return languageResource.getAll();
            } else {
                return $q.when([]); //resolve an empty collection
            }
        });
    }
    /**
     * Called once during init to initialize the navigation/tree/languages
     */
    function initNav() {
        // load languages
        loadLanguages().then(function (languages) {
            $scope.languages = languages;
            if ($scope.languages.length > 1) {
                //if there's already one set, check if it exists
                var language = null;
                var mainCulture = $location.search().mculture;
                if (mainCulture) {
                    language = _.find($scope.languages, function (l) {
                        return l.culture.toLowerCase() === mainCulture.toLowerCase();
                    });
                }
                if (!language) {
                    language = lastLanguageOrDefault();
                    if (language) {
                        $location.search("mculture", language.culture);
                    }
                }
            }
            $scope.currentSection = $routeParams.section;
            configureTreeAndLanguages();
            //resolve the tree promise, set it's property values for loading the tree which will make the tree load
            treeInitPromise.resolve({
                section: $scope.currentSection,
                customTreeParams: $scope.customTreeParams,
                cacheKey: $scope.treeCacheKey,
                //because angular doesn't return a promise for the resolve method, we need to resort to some hackery, else
                //like normal JS promises we could do resolve(...).then()
                onLoaded: function () {
                    //the nav is ready, let the app know
                    eventsService.emit("app.navigationReady", { treeApi: $scope.treeApi });
                }
            });
        });
    }
    /**
     * Returns the language from the UMB_MCULTURE cookie if it exists in $scope.languages,
     * otherwise the default language; null when fewer than two languages are configured.
     */
    function lastLanguageOrDefault() {
        if (!$scope.languages || $scope.languages.length <= 1) {
            return null;
        }
        // see if we can find a culture in the cookie set when changing language
        var lastCulture = $cookies.get("UMB_MCULTURE");
        var language = lastCulture ? _.find($scope.languages, function (l) {
            return l.culture.toLowerCase() === lastCulture.toLowerCase();
        }) : null;
        if (!language) {
            // no luck, look for the default language
            language = _.find($scope.languages, function (l) {
                return l.isDefault;
            });
        }
        return language;
    }
    function nodeExpandedHandler(args) {
        //store the reference to the expanded node path
        if (args.node) {
            treeService._trackExpandedPaths(args.node, expandedPaths);
        }
    }
    $scope.selectLanguage = function (language) {
        $location.search("mculture", language.culture);
        // add the selected culture to a cookie so the user will log back into the same culture later on (cookie lifetime = one year)
        var expireDate = new Date();
        expireDate.setDate(expireDate.getDate() + 365);
        $cookies.put("UMB_MCULTURE", language.culture, { path: "/", expires: expireDate });
        // close the language selector
        $scope.page.languageSelectorIsOpen = false;
        configureTreeAndLanguages(); //re-bind language to the query string and update the tree params
        //reload the tree with it's updated querystring args
        $scope.treeApi.load({ section: $scope.currentSection, customTreeParams: $scope.customTreeParams, cacheKey: $scope.treeCacheKey }).then(function () {
            //re-sync to currently edited node
            var currNode = appState.getTreeState("selectedNode");
            //create the list of promises
            var promises = [];
            //starting with syncing to the currently selected node if there is one
            if (currNode) {
                var path = treeService.getPath(currNode);
                promises.push($scope.treeApi.syncTree({ path: path, activate: true }));
            }
            // TODO: If we want to keep all paths expanded ... but we need more testing since we need to deal with unexpanding
            //for (var i = 0; i < expandedPaths.length; i++) {
            //    promises.push($scope.treeApi.syncTree({ path: expandedPaths[i], activate: false, forceReload: true }));
            //}
            //execute them sequentially
            // set selected language to active
            Utilities.forEach($scope.languages, language => {
                language.active = false;
            });
            language.active = true;
            angularHelper.executeSequentialPromises(promises);
        });
    };
    //this reacts to the options item in the tree
    // TODO: migrate to nav service
    // TODO: is this used?
    $scope.searchShowMenu = function (ev, args) {
        //always skip default
        args.skipDefault = true;
        navigationService.showMenu(args);
    };
    // TODO: migrate to nav service
    // TODO: is this used?
    $scope.searchHide = function () {
        navigationService.hideSearch();
    };
    //the below assists with hiding/showing the tree
    var treeActive = false;
    //Sets a service variable as soon as the user hovers the navigation with the mouse
    //used by the leaveTree method to delay hiding
    $scope.enterTree = function (event) {
        treeActive = true;
    };
    // Hides navigation tree, with a short delay, is cancelled if the user moves the mouse over the tree again
    $scope.leaveTree = function (event) {
        //this is a hack to handle IE touch events which freaks out due to no mouse events so the tree instantly shuts down
        if (!event) {
            return;
        }
        closeTree();
    };
    $scope.onOutsideClick = function () {
        closeTree();
    };
    function closeTree() {
        if (!appState.getGlobalState("touchDevice")) {
            treeActive = false;
            $timeout(function () {
                if (!treeActive) {
                    navigationService.hideTree();
                }
            }, 300);
        }
    }
    $scope.toggleLanguageSelector = function () {
        $scope.page.languageSelectorIsOpen = !$scope.page.languageSelectorIsOpen;
    };
    //ensure to unregister from all events!
    $scope.$on('$destroy', function () {
        for (var e in evts) {
            eventsService.unsubscribe(evts[e]);
        }
    });
}
//register the controller with the umbraco module so the view can reference it by name
angular.module('umbraco').controller("Umbraco.NavigationController", NavigationController);
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By:
# Maintained By:
import ggrc.builder
import ggrc.models
from ggrc.builder.json import publish
from ggrc.services.common import Resource
from mock import MagicMock
from tests.ggrc import TestCase
class TestBuilder(TestCase):
    """Tests for JSON publishing of (mocked) models.

    Since module member lookup is used to wire the builders up, every test
    must clean up after itself. Mocks registered through the helper methods
    are therefore tracked during setUp and removed again in tearDown.
    """

    def mock_service(self, name):
        """Register a MagicMock Resource as ggrc.services.<name> and track it."""
        service = MagicMock(Resource)
        service.url_for.return_value = '/some-url'
        self.mock_services[name] = service
        setattr(ggrc.services, name, service)
        return service

    def mock_class(self, name, bases=(), _publish_attrs=None):
        """Build a mock class with the given name/bases and remember its name
        so any builder registered under it can be removed in tearDown."""
        klass = MagicMock()
        klass.__name__ = name
        klass.__bases__ = bases
        if _publish_attrs:
            klass._publish_attrs = _publish_attrs
        self.mock_builders.append(name)
        return klass

    def mock_model(self, name, bases=(), _publish_attrs=None, **kwarg):
        """Build a mock instance whose class is a freshly mocked model class;
        any extra keyword arguments become attributes on the instance."""
        instance = MagicMock()
        instance.__class__ = self.mock_class(name, bases, _publish_attrs)
        for attr, value in kwarg.items():
            setattr(instance, attr, value)
        return instance

    def setUp(self):
        super(TestBuilder, self).setUp()
        self.mock_services = {}
        self.mock_builders = []

    def tearDown(self):
        # Undo everything the helpers wired onto the ggrc modules.
        for name in list(self.mock_services.keys()):
            delattr(ggrc.services, name)
        for name in self.mock_builders:
            if hasattr(ggrc.builder.json, name):
                delattr(ggrc.builder.json, name)
        super(TestBuilder, self).tearDown()

    def test_simple_builder(self):
        """A published attribute appears in the JSON output."""
        self.mock_service('MockModel')
        model = self.mock_model(
            'Mock_test_simple_builder',
            foo='bar',
            id=1,
            _publish_attrs=['foo'],
        )
        json_obj = publish(model)
        self.assertIn('foo', json_obj)
        self.assertEqual('bar', json_obj['foo'])

    def test_simple_mixin_inheritance(self):
        """Attributes published by a mixin base class are included too."""
        self.mock_service('MockModelWithMixin')
        mock_mixin = self.mock_class('MockMixin', _publish_attrs=['boo'])
        model = self.mock_model(
            'MockModelWithMixin',
            bases=(mock_mixin,),
            foo='bar',
            boo='far',
            _publish_attrs=['foo'],
        )
        json_obj = publish(model)
        self.assertDictContainsSubset(
            {'foo': 'bar', 'boo': 'far'},
            json_obj)

    def test_sophisticated_mixins(self):
        """Attributes from a multi-level mixin hierarchy are collected, and
        models sharing a mixin do not leak attributes into each other."""
        self.mock_service('ModelA')
        self.mock_service('ModelB')
        mixin = self.mock_class('Mixin', _publish_attrs=['mixin'])
        mixin_subclass = self.mock_class(
            'MixinSubclass', (mixin,), _publish_attrs=['mixin_subclass'])
        model_a = self.mock_model(
            'ModelA',
            bases=(mixin_subclass,),
            prop_a='prop_a', mixin='mixin_a', mixin_subclass='mixin_subclass_a',
            _publish_attrs=['prop_a'])
        model_b = self.mock_model(
            'ModelB',
            bases=(mixin,), prop_b='prop_b', mixin='mixin_b',
            _publish_attrs=['prop_b'])
        json_obj = publish(model_a)
        self.assertDictContainsSubset(
            {'prop_a': 'prop_a', 'mixin': 'mixin_a',
             'mixin_subclass': 'mixin_subclass_a'},
            json_obj)
        json_obj = publish(model_b)
        self.assertDictContainsSubset(
            {'prop_b': 'prop_b', 'mixin': 'mixin_b'},
            json_obj)
|
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import lmfit
import scipy.stats
import scipy.optimize
minimize = lmfit.minimize
from fitter import Fitter
# To use different defaults, change these three import statements.
from kid_readout.analysis.khalil import delayed_generic_s21 as default_model
from kid_readout.analysis.khalil import delayed_generic_guess as default_guess
from kid_readout.analysis.khalil import generic_functions as default_functions
from kid_readout.analysis.khalil import bifurcation_s21, bifurcation_guess
def fit_resonator(freq, s21, mask=None, errors=None, weight_by_errors=True, min_a=0.08, fstat_thresh=0.999,
                  delay_estimate=None, verbose=False):
    """Fit s21 vs freq with both the default model and the bifurcation model.

    freq : array of floats
        frequencies at which s21 was measured
    s21 : array of complex
        measured complex transmission
    mask : bool array or None
        points to include in the fit (default: all)
    errors : array or None
        measurement errors used for weighting
    weight_by_errors : bool
        if True, weight residuals by the supplied errors
    min_a : float
        minimum bifurcation parameter required to prefer the bifurcation model
    fstat_thresh : float
        F-statistic threshold (the F-test is currently disabled; see below)
    delay_estimate : float or None
        if given, used to seed the cable-delay parameter of both guesses
    verbose : bool
        if True, print the reasons the bifurcation model was rejected

    Returns (rr, bif, prefer_bif): the plain Resonator fit, the bifurcation
    Resonator fit, and a flag indicating whether the bifurcation fit should
    be preferred.
    """
    if delay_estimate is not None:
        # Wrap the default guess so the supplied delay seeds the fit.
        def my_default_guess(f, data):
            params = default_guess(f, data)
            params['delay'].value = delay_estimate
            return params
    else:
        my_default_guess = default_guess
    rr = Resonator(freq, s21, mask=mask, errors=errors, weight_by_errors=weight_by_errors, guess=my_default_guess)
    if delay_estimate is not None:
        def my_bifurcation_guess(f, data):
            params = bifurcation_guess(f, data)
            params['delay'].value = delay_estimate
            return params
    else:
        my_bifurcation_guess = bifurcation_guess
    bif = Resonator(freq, s21, mask=mask, errors=errors, weight_by_errors=weight_by_errors,
                    guess=my_bifurcation_guess, model=bifurcation_s21)
    fval = scipy.stats.f_value(np.sum(np.abs(rr.residual()) ** 2),
                               np.sum(np.abs(bif.residual()) ** 2),
                               rr.result.nfree, bif.result.nfree)
    fstat = scipy.stats.distributions.f.cdf(fval, rr.result.nfree, bif.result.nfree)
    aval = bif.result.params['a'].value
    aerr = bif.result.params['a'].stderr
    reasons = []
    if aval <= aerr:
        prefer_bif = False
        reasons.append("Error on bifurcation parameter exceeds fitted value")
    else:
        if aval < min_a:
            prefer_bif = False
            reasons.append("Bifurcation parameter %f is less than minimum required %f" % (aval, min_a))
        else:
            # not sure this is working right, so leave it out for now.
            if False:  # fstat < fstat_thresh:
                prefer_bif = False
                reasons.append("F-statistic %f is less than threshold %f" % (fstat, fstat_thresh))
            else:
                prefer_bif = True
    if verbose and not prefer_bif:
        # print() with a single pre-joined string behaves identically on
        # Python 2 and 3; the original Python-2-only print statement
        # ('print a, b') produced the same output as this concatenation.
        print("Not using bifurcation model because: " + (','.join(reasons)))
    return rr, bif, prefer_bif
def fit_best_resonator(*args, **kwargs):
    """Fit both resonator models and return whichever one fit_resonator prefers."""
    plain_fit, bifurcation_fit, prefer_bif = fit_resonator(*args, **kwargs)
    if prefer_bif:
        return bifurcation_fit
    return plain_fit
class Resonator(Fitter):
"""
This class represents a single resonator. All of the
model-dependent behavior is contained in functions that are
supplied to the class. There is a little bit of Python magic that
allows for easy access to the fit parameters and functions of only
the fit parameters.
The idea is that, given sweep data f and s21,
r = Resonator(f, s21)
should just work. Modify the import statements to change the
defaults.
"""
def __init__(self, f, data, model=default_model, guess=default_guess, functions=default_functions,
mask=None, errors=None, weight_by_errors=True):
"""
Instantiate a resonator using our current best model.
Parameter model is a function S_21(params, f) that returns the
modeled values of S_21.
Parameter guess is a function guess(f, data) that returns a
good-enough initial guess at all of the fit parameters.
Parameter functions is a dictionary that maps keys that are
valid Python variables to functions that take a Parameters
object as their only argument.
Parameter mask is a boolean array of the same length as f and
data; only points f[mask] and data[mask] are used to fit the
data. The default is to use all data. Use this to exclude
glitches or resonances other than the desired one.
"""
if not np.iscomplexobj(data):
raise TypeError("Resonator data should always be complex, but got real values")
if errors is not None:
if not np.iscomplexobj(errors):
errors = errors*(1+1j) # ensure errors is complex
super(Resonator,self).__init__(f,data,model=model,guess=guess,functions=functions,mask=mask,
errors=errors,weight_by_errors=weight_by_errors)
if self.x_data.max() < 1e6:
self.freq_units_MHz = True
else:
self.freq_units_MHz = False
self.freq_data = self.x_data
self.s21_data = self.y_data
def get_normalization(self, freq, remove_amplitude = True, remove_delay = True, remove_phase = True):
"""
return the complex factor that removes the arbitrary amplitude, cable delay, and phase from the resonator fit
freq : float or array of floats
frequency in same units as the model was built with, at which normalization should be computed
remove_amplitude : bool, default True
include arbitrary amplitude correction
remove_delay : bool, default True
include cable delay correction
remove_phase : bool, default True
include arbitrary phase offset correction
"""
normalization = 1.0
if remove_amplitude:
normalization *= 1.0/self.A_mag
if remove_phase:
phi = self.phi + self.A_phase
else:
phi = 0
if remove_delay:
delay = self.delay
else:
delay = 0
normalization *= np.exp(1j*(2*np.pi*(freq-self.f_phi)*delay - phi))
return normalization
def normalize(self, freq, s21_raw, remove_amplitude = True, remove_delay = True, remove_phase = True):
"""
Normalize s21 data, removing arbitrary ampltude, delay, and phase terms
freq : float or array of floats
frequency in same units as the model was built with, at which normalization should be computed
s21_raw : complex or array of complex
raw s21 data which should be normalized
"""
normalization = self.get_normalization(freq, remove_amplitude=remove_amplitude, remove_delay=remove_delay,
remove_phase= remove_phase)
return s21_raw*normalization
def normalized_model(self,freq,remove_amplitude = True, remove_delay = True, remove_phase = True):
"""
Evaluate the model, removing arbitrary ampltude, delay, and phase terms
freq : float or array of floats
frequency in same units as the model was built with, at which normalized model should be evaluated
"""
return self.normalize(freq, self.model(x=freq),remove_amplitude=remove_amplitude, remove_delay=remove_delay,
remove_phase= remove_phase)
def approx_normalized_gradient(self,freq):
    """
    Calculate the approximate gradient of the normalized model dS21/df at the given frequency.
    The units of the result are S21 / Hz.

    freq : float or array of floats
        frequency in same units as the model was built with, at which the normalized
        gradient should be evaluated
    """
    # A step of 1 Hz, expressed in whatever frequency units the model uses.
    df = 1e-6 if self.freq_units_MHz else 1.0
    lower = self.normalized_model(freq)
    upper = self.normalized_model(freq + df)
    # Forward difference; division by the 1 Hz step is implied.
    return upper - lower
def project_s21_to_delta_freq(self,freq,s21,use_data_mean=True,s21_already_normalized=False):
    """
    Project S21 data onto the orthogonal vectors tangent and perpendicular to the
    resonance circle at the measurement frequency.

    This results in complex data with the real part projected along the frequency
    direction (in Hz) and the imaginary part projected along the dissipation direction
    (also in pseudo-Hz).

    freq : float
        frequency in same units as the model was built with, at which the S21 data
        was measured.
    s21 : complex or array of complex
        raw S21 data measured at the indicated frequency
    use_data_mean : bool, default True
        if true, center the data on the mean of the data before projecting.
        if false, center the data on the value of the model evaluated at the
        measurement frequency.
    s21_already_normalized : bool, default False
        if true, the s21 data has already been normalized
        if false, first normalize the s21 data
    """
    normalized_s21 = s21 if s21_already_normalized else self.normalize(freq, s21)
    center = normalized_s21.mean() if use_data_mean else self.normalized_model(freq)
    return (normalized_s21 - center) / self.approx_normalized_gradient(freq)
def convert_s21_to_freq_fluctuation(self,freq,s21):
    """
    Use the formula in Phil's LTD paper to convert S21 data to frequency fluctuations.
    The result is the same as Re(S21/(dS21/df)), i.e. the same as
    self.project_s21_to_delta_freq().real.

    freq : float
        frequency in same units as the model was built with, at which the S21 data
        was measured.
    s21 : complex or array of complex
        raw S21 data measured at the indicated frequency
    """
    normalized = self.normalize(freq, s21)
    slope = self.approx_normalized_gradient(freq)
    # Notation follows Phil's LTD paper: project (I, Q) onto the gradient direction.
    numerator = normalized.real * slope.real + normalized.imag * slope.imag
    denominator = slope.real ** 2 + slope.imag ** 2
    return numerator / denominator
|
/*!
* bootstrap-fileinput v4.3.4
* http://plugins.krajee.com/file-input
*
* Author: Kartik Visweswaran
* Copyright: 2014 - 2016, Kartik Visweswaran, Krajee.com
*
* Licensed under the BSD 3-Clause
* https://github.com/kartik-v/bootstrap-fileinput/blob/master/LICENSE.md
*/!function(a){"use strict";"function"==typeof define&&define.amd?define(["jquery"],a):"object"==typeof module&&module.exports?module.exports=a(require("jquery")):a(window.jQuery)}(function(a){"use strict";a.fn.fileinputLocales={},a.fn.fileinputThemes={};var b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z,$,_,aa,ba,ca,da,ea,fa,ga,ha,ia,ja,ka,la,ma,na,oa;b=".fileinput",c="kvFileinputModal",d='style="width:{width};height:{height};"',e='<param name="controller" value="true" />\n<param name="allowFullScreen" value="true" />\n<param name="allowScriptAccess" value="always" />\n<param name="autoPlay" value="false" />\n<param name="autoStart" value="false" />\n<param name="quality" value="high" />\n',f='<div class="file-preview-other">\n<span class="{previewFileIconClass}">{previewFileIcon}</span>\n</div>',g=window.URL||window.webkitURL,h=function(a,b,c){return void 0!==a&&(c?a===b:a.match(b))},i=function(a){if("Microsoft Internet Explorer"!==navigator.appName)return!1;if(10===a)return new RegExp("msie\\s"+a,"i").test(navigator.userAgent);var c,b=document.createElement("div");return b.innerHTML="<!--[if IE "+a+"]> <i></i> <![endif]-->",c=b.getElementsByTagName("i").length,document.body.appendChild(b),b.parentNode.removeChild(b),c},j=function(a,c,d,e){var f=e?c:c.split(" ").join(b+" ")+b;a.off(f).on(f,d)},k={data:{},init:function(a){var b=a.initialPreview,c=a.id;b.length>0&&!ea(b)&&(b=b.split(a.initialPreviewDelimiter)),k.data[c]={content:b,config:a.initialPreviewConfig,tags:a.initialPreviewThumbTags,delimiter:a.initialPreviewDelimiter,previewFileType:a.initialPreviewFileType,previewAsData:a.initialPreviewAsData,template:a.previewGenericTemplate,showZoom:a.fileActionSettings.showZoom,showDrag:a.fileActionSettings.showDrag,getSize:function(b){return a._getSize(b)},parseTemplate:function(b,c,d,e,f,g,h){var i=" file-preview-initial";return a._generatePreviewTemplate(b,c,d,e,f,!1,null,i,g,h)},msg:function(b){return 
a._getMsgSelected(b)},initId:a.previewInitId,footer:a._getLayoutTemplate("footer").replace(/\{progress}/g,a._renderThumbProgress()),isDelete:a.initialPreviewShowDelete,caption:a.initialCaption,actions:function(b,c,d,e,f,g,h){return a._renderFileActions(b,c,d,e,f,g,h,!0)}}},fetch:function(a){return k.data[a].content.filter(function(a){return null!==a})},count:function(a,b){return k.data[a]&&k.data[a].content?b?k.data[a].content.length:k.fetch(a).length:0},get:function(b,c,d){var j,l,n,o,p,q,e="init_"+c,f=k.data[b],g=f.config[c],h=f.content[c],i=f.initId+"-"+e,m=" file-preview-initial",r=fa("previewAsData",g,f.previewAsData);return d=void 0===d||d,h?(g&&g.frameClass&&(m+=" "+g.frameClass),r?(n=f.previewAsData?fa("type",g,f.previewFileType||"generic"):"generic",o=fa("caption",g),p=k.footer(b,c,d,g&&g.size||null),q=fa("filetype",g,n),j=f.parseTemplate(n,h,o,q,i,p,e,null)):j=f.template.replace(/\{previewId}/g,i).replace(/\{frameClass}/g,m).replace(/\{fileindex}/g,e).replace(/\{content}/g,f.content[c]).replace(/\{template}/g,fa("type",g,f.previewFileType)).replace(/\{footer}/g,k.footer(b,c,d,g&&g.size||null)),f.tags.length&&f.tags[c]&&(j=ia(j,f.tags[c])),da(g)||da(g.frameAttr)||(l=a(document.createElement("div")).html(j),l.find(".file-preview-initial").attr(g.frameAttr),j=l.html(),l.remove()),j):""},add:function(b,c,d,e,f){var h,g=a.extend(!0,{},k.data[b]);return ea(c)||(c=c.split(g.delimiter)),f?(h=g.content.push(c)-1,g.config[h]=d,g.tags[h]=e):(h=c.length-1,g.content=c,g.config=d,g.tags=e),k.data[b]=g,h},set:function(b,c,d,e,f){var h,i,g=a.extend(!0,{},k.data[b]);if(c&&c.length&&(ea(c)||(c=c.split(g.delimiter)),i=c.filter(function(a){return null!==a}),i.length)){if(void 0===g.content&&(g.content=[]),void 0===g.config&&(g.config=[]),void 0===g.tags&&(g.tags=[]),f){for(h=0;h<c.length;h++)c[h]&&g.content.push(c[h]);for(h=0;h<d.length;h++)d[h]&&g.config.push(d[h]);for(h=0;h<e.length;h++)e[h]&&g.tags.push(e[h])}else 
g.content=c,g.config=d,g.tags=e;k.data[b]=g}},unset:function(a,b){var c=k.count(a);if(c){if(1===c)return k.data[a].content=[],k.data[a].config=[],void(k.data[a].tags=[]);k.data[a].content[b]=null,k.data[a].config[b]=null,k.data[a].tags[b]=null}},out:function(a){var d,b="",c=k.data[a],e=k.count(a,!0);if(0===e)return{content:"",caption:""};for(var f=0;f<e;f++)b+=k.get(a,f);return d=c.msg(k.count(a)),{content:'<div class="file-initial-thumbs">'+b+"</div>",caption:d}},footer:function(a,b,c,d){var e=k.data[a];if(c=void 0===c||c,0===e.config.length||da(e.config[b]))return"";var f=e.config[b],g=fa("caption",f),h=fa("width",f,"auto"),i=fa("url",f,!1),j=fa("key",f,null),l=fa("showDelete",f,!0),m=fa("showZoom",f,e.showZoom),n=fa("showDrag",f,e.showDrag),o=i===!1&&c,p=e.isDelete?e.actions(!1,l,m,n,o,i,j):"",q=e.footer.replace(/\{actions}/g,p);return q.replace(/\{caption}/g,g).replace(/\{size}/g,e.getSize(d)).replace(/\{width}/g,h).replace(/\{indicator}/g,"").replace(/\{indicatorTitle}/g,"")}},l=function(a,b){return b=b||0,"number"==typeof a?a:("string"==typeof a&&(a=parseFloat(a)),isNaN(a)?b:a)},m=function(){return!(!window.File||!window.FileReader)},n=function(){var a=document.createElement("div");return!i(9)&&(void 0!==a.draggable||void 0!==a.ondragstart&&void 0!==a.ondrop)},o=function(){return m()&&window.FormData},p=function(a,b){a.removeClass(b).addClass(b)},X={showRemove:!0,showUpload:!0,showZoom:!0,showDrag:!0,removeIcon:'<i class="glyphicon glyphicon-trash text-danger"></i>',removeClass:"btn btn-xs btn-default",removeTitle:"Remove file",uploadIcon:'<i class="glyphicon glyphicon-upload text-info"></i>',uploadClass:"btn btn-xs btn-default",uploadTitle:"Upload file",zoomIcon:'<i class="glyphicon glyphicon-zoom-in"></i>',zoomClass:"btn btn-xs btn-default",zoomTitle:"View Details",dragIcon:'<i class="glyphicon glyphicon-menu-hamburger"></i>',dragClass:"text-info",dragTitle:"Move / Rearrange",dragSettings:{},indicatorNew:'<i class="glyphicon glyphicon-hand-down 
text-warning"></i>',indicatorSuccess:'<i class="glyphicon glyphicon-ok-sign text-success"></i>',indicatorError:'<i class="glyphicon glyphicon-exclamation-sign text-danger"></i>',indicatorLoading:'<i class="glyphicon glyphicon-hand-up text-muted"></i>',indicatorNewTitle:"Not uploaded yet",indicatorSuccessTitle:"Uploaded",indicatorErrorTitle:"Upload Error",indicatorLoadingTitle:"Uploading ..."},q='{preview}\n<div class="kv-upload-progress hide"></div>\n<div class="input-group {class}">\n {caption}\n <div class="input-group-btn">\n {remove}\n {cancel}\n {upload}\n {browse}\n </div>\n</div>',r='{preview}\n<div class="kv-upload-progress hide"></div>\n{remove}\n{cancel}\n{upload}\n{browse}\n',s='<div class="file-preview {class}">\n {close} <div class="{dropClass}">\n <div class="file-preview-thumbnails">\n </div>\n <div class="clearfix"></div> <div class="file-preview-status text-center text-success"></div>\n <div class="kv-fileinput-error"></div>\n </div>\n</div>',u='<div class="close fileinput-remove">×</div>\n',t='<i class="glyphicon glyphicon-file kv-caption-icon"></i>',v='<div tabindex="500" class="form-control file-caption {class}">\n <div class="file-caption-name"></div>\n</div>\n',w='<button type="{type}" tabindex="500" title="{title}" class="{css}" {status}>{icon} {label}</button>',x='<a href="{href}" tabindex="500" title="{title}" class="{css}" {status}>{icon} {label}</a>',y='<div tabindex="500" class="{css}" {status}>{icon} {label}</div>',z='<div id="'+c+'" class="file-zoom-dialog modal fade" tabindex="-1" aria-labelledby="'+c+'Label"></div>',A='<div class="modal-dialog modal-lg" role="document">\n <div class="modal-content">\n <div class="modal-header">\n <div class="kv-zoom-actions pull-right">{toggleheader}{fullscreen}{borderless}{close}</div>\n <h3 class="modal-title">{heading} <small><span class="kv-zoom-title"></span></small></h3>\n </div>\n <div class="modal-body">\n <div class="floating-buttons"></div>\n <div class="kv-zoom-body 
file-zoom-content"></div>\n{prev} {next}\n </div>\n </div>\n</div>\n',B='<div class="progress">\n <div class="{class}" role="progressbar" aria-valuenow="{percent}" aria-valuemin="0" aria-valuemax="100" style="width:{percent}%;">\n {percent}%\n </div>\n</div>',C=" <br><samp>({sizeText})</samp>",D='<div class="file-thumbnail-footer">\n <div class="file-footer-caption" title="{caption}">{caption}{size}</div>\n {progress} {actions}\n</div>',E='<div class="file-actions">\n <div class="file-footer-buttons">\n {upload} {delete} {zoom} {other} </div>\n {drag}\n <div class="file-upload-indicator" title="{indicatorTitle}">{indicator}</div>\n <div class="clearfix"></div>\n</div>',F='<button type="button" class="kv-file-remove {removeClass}" title="{removeTitle}" {dataUrl}{dataKey}>{removeIcon}</button>\n',G='<button type="button" class="kv-file-upload {uploadClass}" title="{uploadTitle}">{uploadIcon}</button>',H='<button type="button" class="kv-file-zoom {zoomClass}" title="{zoomTitle}">{zoomIcon}</button>',I='<span class="file-drag-handle {dragClass}" title="{dragTitle}">{dragIcon}</span>',J='<div class="file-preview-frame{frameClass}" id="{previewId}" data-fileindex="{fileindex}" data-template="{template}"',K=J+'><div class="kv-file-content">\n',L=J+' title="{caption}" '+d+'><div class="kv-file-content">\n',M="</div>{footer}\n</div>\n",N="{content}\n",O='<div class="kv-preview-data file-preview-html" title="{caption}" '+d+">{data}</div>\n",P='<img src="{data}" class="kv-preview-data file-preview-image" title="{caption}" alt="{caption}" '+d+">\n",Q='<textarea class="kv-preview-data file-preview-text" title="{caption}" readonly '+d+">{data}</textarea>\n",R='<video class="kv-preview-data" width="{width}" height="{height}" controls>\n<source src="{data}" type="{type}">\n'+f+"\n</video>\n",S='<audio class="kv-preview-data" controls>\n<source src="{data}" type="{type}">\n'+f+"\n</audio>\n",T='<object class="kv-preview-data file-object" type="application/x-shockwave-flash" 
width="{width}" height="{height}" data="{data}">\n'+e+" "+f+"\n</object>\n",U='<object class="kv-preview-data file-object" data="{data}" type="{type}" width="{width}" height="{height}">\n<param name="movie" value="{caption}" />\n'+e+" "+f+"\n</object>\n",V='<embed class="kv-preview-data" src="{data}" width="{width}" height="{height}" type="application/pdf">\n',W='<div class="kv-preview-data file-preview-other-frame">\n'+f+"\n</div>\n",Y={main1:q,main2:r,preview:s,close:u,fileIcon:t,caption:v,modalMain:z,modal:A,progress:B,size:C,footer:D,actions:E,actionDelete:F,actionUpload:G,actionZoom:H,actionDrag:I,btnDefault:w,btnLink:x,btnBrowse:y},Z={generic:K+N+M,html:K+O+M,image:K+P+M,text:K+Q+M,video:L+R+M,audio:L+S+M,flash:L+T+M,object:L+U+M,pdf:L+V+M,other:L+W+M},_=["image","html","text","video","audio","flash","pdf","object"],ba={image:{width:"auto",height:"160px"},html:{width:"213px",height:"160px"},text:{width:"213px",height:"160px"},video:{width:"213px",height:"160px"},audio:{width:"213px",height:"80px"},flash:{width:"213px",height:"160px"},object:{width:"160px",height:"160px"},pdf:{width:"160px",height:"160px"},other:{width:"160px",height:"160px"}},$={image:{width:"100%",height:"100%"},html:{width:"100%",height:"100%","min-height":"480px"},text:{width:"100%",height:"100%","min-height":"480px"},video:{width:"auto",height:"100%","max-width":"100%"},audio:{width:"100%",height:"30px"},flash:{width:"auto",height:"480px"},object:{width:"auto",height:"100%","min-height":"480px"},pdf:{width:"100%",height:"100%","min-height":"480px"},other:{width:"auto",height:"100%","min-height":"480px"}},ca={image:function(a,b){return h(a,"image.*")||h(b,/\.(gif|png|jpe?g)$/i)},html:function(a,b){return h(a,"text/html")||h(b,/\.(htm|html)$/i)},text:function(a,b){return h(a,"text.*")||h(b,/\.(xml|javascript)$/i)||h(b,/\.(txt|md|csv|nfo|ini|json|php|js|css)$/i)},video:function(a,b){return 
h(a,"video.*")&&(h(a,/(ogg|mp4|mp?g|webm|3gp)$/i)||h(b,/\.(og?|mp4|webm|mp?g|3gp)$/i))},audio:function(a,b){return h(a,"audio.*")&&(h(b,/(ogg|mp3|mp?g|wav)$/i)||h(b,/\.(og?|mp3|mp?g|wav)$/i))},flash:function(a,b){return h(a,"application/x-shockwave-flash",!0)||h(b,/\.(swf)$/i)},pdf:function(a,b){return h(a,"application/pdf",!0)||h(b,/\.(pdf)$/i)},object:function(){return!0},other:function(){return!0}},da=function(b,c){return void 0===b||null===b||0===b.length||c&&""===a.trim(b)},ea=function(a){return Array.isArray(a)||"[object Array]"===Object.prototype.toString.call(a)},fa=function(a,b,c){return c=c||"",b&&"object"==typeof b&&a in b?b[a]:c},aa=function(b,c,d){return da(b)||da(b[c])?d:a(b[c])},ga=function(){return Math.round((new Date).getTime()+100*Math.random())},ha=function(a){return a.replace(/&/g,"&").replace(/</g,"<").replace(/>/g,">").replace(/"/g,""").replace(/'/g,"'")},ia=function(b,c){var d=b;return c?(a.each(c,function(a,b){"function"==typeof b&&(b=b()),d=d.split(a).join(b)}),d):d},ja=function(a){var b=a.is("img")?a.attr("src"):a.find("source").attr("src");g.revokeObjectURL(b)},ka=function(a){var b=a.lastIndexOf("/");return b===-1&&(b=a.lastIndexOf("\\")),a.split(a.substring(b,b+1)).pop()},la=function(){return 
document.fullscreenElement||document.mozFullScreenElement||document.webkitFullscreenElement||document.msFullscreenElement},ma=function(a){a&&!la()?document.documentElement.requestFullscreen?document.documentElement.requestFullscreen():document.documentElement.msRequestFullscreen?document.documentElement.msRequestFullscreen():document.documentElement.mozRequestFullScreen?document.documentElement.mozRequestFullScreen():document.documentElement.webkitRequestFullscreen&&document.documentElement.webkitRequestFullscreen(Element.ALLOW_KEYBOARD_INPUT):document.exitFullscreen?document.exitFullscreen():document.msExitFullscreen?document.msExitFullscreen():document.mozCancelFullScreen?document.mozCancelFullScreen():document.webkitExitFullscreen&&document.webkitExitFullscreen()},na=function(a,b,c){if(c>=a.length)for(var d=c-a.length;d--+1;)a.push(void 0);return a.splice(c,0,a.splice(b,1)[0]),a},oa=function(b,c){var d=this;d.$element=a(b),d._validate()&&(d.isPreviewable=m(),d.isIE9=i(9),d.isIE10=i(10),d.isPreviewable||d.isIE9?(d._init(c),d._listen()):d.$element.removeClass("file-loading"))},oa.prototype={constructor:oa,_init:function(b){var 
e,c=this,d=c.$element;a.each(b,function(a,b){switch(a){case"minFileCount":case"maxFileCount":case"maxFileSize":c[a]=l(b);break;default:c[a]=b}}),c.fileInputCleared=!1,c.fileBatchCompleted=!0,c.isPreviewable||(c.showPreview=!1),c.uploadFileAttr=da(d.attr("name"))?"file_data":d.attr("name"),c.reader=null,c.formdata={},c.clearStack(),c.uploadCount=0,c.uploadStatus={},c.uploadLog=[],c.uploadAsyncCount=0,c.loadedImages=[],c.totalImagesCount=0,c.ajaxRequests=[],c.isError=!1,c.ajaxAborted=!1,c.cancelling=!1,e=c._getLayoutTemplate("progress"),c.progressTemplate=e.replace("{class}",c.progressClass),c.progressCompleteTemplate=e.replace("{class}",c.progressCompleteClass),c.progressErrorTemplate=e.replace("{class}",c.progressErrorClass),c.dropZoneEnabled=n()&&c.dropZoneEnabled,c.isDisabled=c.$element.attr("disabled")||c.$element.attr("readonly"),c.isUploadable=o()&&!da(c.uploadUrl),c.isClickable=c.browseOnZoneClick&&c.showPreview&&(c.isUploadable&&c.dropZoneEnabled||!da(c.defaultPreviewContent)),c.slug="function"==typeof b.slugCallback?b.slugCallback:c._slugDefault,c.mainTemplate=c.showCaption?c._getLayoutTemplate("main1"):c._getLayoutTemplate("main2"),c.captionTemplate=c._getLayoutTemplate("caption"),c.previewGenericTemplate=c._getPreviewTemplate("generic"),c.resizeImage&&(c.maxImageWidth||c.maxImageHeight)&&(c.imageCanvas=document.createElement("canvas"),c.imageCanvasContext=c.imageCanvas.getContext("2d")),da(c.$element.attr("id"))&&c.$element.attr("id",ga()),void 
0===c.$container?c.$container=c._createContainer():c._refreshContainer(),c.$dropZone=c.$container.find(".file-drop-zone"),c.$progress=c.$container.find(".kv-upload-progress"),c.$btnUpload=c.$container.find(".fileinput-upload"),c.$captionContainer=aa(b,"elCaptionContainer",c.$container.find(".file-caption")),c.$caption=aa(b,"elCaptionText",c.$container.find(".file-caption-name")),c.$previewContainer=aa(b,"elPreviewContainer",c.$container.find(".file-preview")),c.$preview=aa(b,"elPreviewImage",c.$container.find(".file-preview-thumbnails")),c.$previewStatus=aa(b,"elPreviewStatus",c.$container.find(".file-preview-status")),c.$errorContainer=aa(b,"elErrorContainer",c.$previewContainer.find(".kv-fileinput-error")),da(c.msgErrorClass)||p(c.$errorContainer,c.msgErrorClass),c.$errorContainer.hide(),c.fileActionSettings=a.extend(!0,X,b.fileActionSettings),c.previewInitId="preview-"+ga(),c.id=c.$element.attr("id"),k.init(c),c._initPreview(!0),c._initPreviewActions(),c.options=b,c._setFileDropZoneTitle(),c.$element.removeClass("file-loading"),c.$element.attr("disabled")&&c.disable(),c._initZoom()},_validate:function(){var b,a=this;return"file"===a.$element.attr("type")||(b='<div class="help-block alert alert-warning"><h4>Invalid Input Type</h4>You must set an input <code>type = file</code> for <b>bootstrap-fileinput</b> plugin to initialize.</div>',a.$element.after(b),!1)},_errorsExist:function(){var c,b=this;return!!b.$errorContainer.find("li").length||(c=a(document.createElement("div")).html(b.$errorContainer.html()),c.find("span.kv-error-close").remove(),c.find("ul").remove(),!!a.trim(c.text()).length)},_errorHandler:function(a,b){var 
c=this,d=a.target.error;d.code===d.NOT_FOUND_ERR?c._showError(c.msgFileNotFound.replace("{name}",b)):d.code===d.SECURITY_ERR?c._showError(c.msgFileSecured.replace("{name}",b)):d.code===d.NOT_READABLE_ERR?c._showError(c.msgFileNotReadable.replace("{name}",b)):d.code===d.ABORT_ERR?c._showError(c.msgFilePreviewAborted.replace("{name}",b)):c._showError(c.msgFilePreviewError.replace("{name}",b))},_addError:function(a){var b=this,c=b.$errorContainer;a&&c.length&&(c.html(b.errorCloseButton+a),j(c.find(".kv-error-close"),"click",function(){c.fadeOut("slow")}))},_resetErrors:function(a){var b=this,c=b.$errorContainer;b.isError=!1,b.$container.removeClass("has-error"),c.html(""),a?c.fadeOut("slow"):c.hide()},_showFolderError:function(a){var d,b=this,c=b.$errorContainer;a&&(d=b.msgFoldersNotAllowed.replace(/\{n}/g,a),b._addError(d),p(b.$container,"has-error"),c.fadeIn(800),b._raise("filefoldererror",[a,d]))},_showUploadError:function(a,b,c){var d=this,e=d.$errorContainer,f=c||"fileuploaderror",g=b&&b.id?'<li data-file-id="'+b.id+'">'+a+"</li>":"<li>"+a+"</li>";return 0===e.find("ul").length?d._addError("<ul>"+g+"</ul>"):e.find("ul").append(g),e.fadeIn(800),d._raise(f,[b,a]),d.$container.removeClass("file-input-new"),p(d.$container,"has-error"),!0},_showError:function(a,b,c){var d=this,e=d.$errorContainer,f=c||"fileerror";return b=b||{},b.reader=d.reader,d._addError(a),e.fadeIn(800),d._raise(f,[b,a]),d.isUploadable||d._clearFileInput(),d.$container.removeClass("file-input-new"),p(d.$container,"has-error"),d.$btnUpload.attr("disabled",!0),!0},_noFilesError:function(a){var b=this,c=b.minFileCount>1?b.filePlural:b.fileSingle,d=b.msgFilesTooLess.replace("{n}",b.minFileCount).replace("{files}",c),e=b.$errorContainer;b._addError(d),b.isError=!0,b._updateFileDetails(0),e.fadeIn(800),b._raise("fileerror",[a,d]),b._clearFileInput(),p(b.$container,"has-error")},_parseError:function(b,c,d){var e=this,f=a.trim(c+""),g="."===f.slice(-1)?"":".",h=void 0!==b.responseJSON&&void 
0!==b.responseJSON.error?b.responseJSON.error:b.responseText;return e.cancelling&&e.msgUploadAborted&&(f=e.msgUploadAborted),e.showAjaxErrorDetails&&h?(h=a.trim(h.replace(/\n\s*\n/g,"\n")),h=h.length>0?"<pre>"+h+"</pre>":"",f+=g+h):f+=g,e.cancelling=!1,d?"<b>"+d+": </b>"+f:f},_parseFileType:function(a){var c,d,e,f,b=this;for(f=0;f<_.length;f+=1)if(e=_[f],c=fa(e,b.fileTypeSettings,ca[e]),d=c(a.type,a.name)?e:"",!da(d))return d;return"other"},_parseFilePreviewIcon:function(b,c){var e,f,d=this,g=d.previewFileIcon;return c&&c.indexOf(".")>-1&&(f=c.split(".").pop(),d.previewFileIconSettings&&d.previewFileIconSettings[f]&&(g=d.previewFileIconSettings[f]),d.previewFileExtSettings&&a.each(d.previewFileExtSettings,function(a,b){return d.previewFileIconSettings[a]&&b(f)?void(g=d.previewFileIconSettings[a]):void(e=!0)})),b.indexOf("{previewFileIcon}")>-1?b.replace(/\{previewFileIconClass}/g,d.previewFileIconClass).replace(/\{previewFileIcon}/g,g):b},_raise:function(b,c){var d=this,e=a.Event(b);if(void 0!==c?d.$element.trigger(e,c):d.$element.trigger(e),e.isDefaultPrevented())return!1;if(!e.result)return e.result;switch(b){case"filebatchuploadcomplete":case"filebatchuploadsuccess":case"fileuploaded":case"fileclear":case"filecleared":case"filereset":case"fileerror":case"filefoldererror":case"fileuploaderror":case"filebatchuploaderror":case"filedeleteerror":case"filecustomerror":case"filesuccessremove":break;default:d.ajaxAborted=e.result}return!0},_listenFullScreen:function(a){var d,e,b=this,c=b.$modal;c&&c.length&&(d=c&&c.find(".btn-fullscreen"),e=c&&c.find(".btn-borderless"),d.length&&e.length&&(d.removeClass("active").attr("aria-pressed","false"),e.removeClass("active").attr("aria-pressed","false"),a?d.addClass("active").attr("aria-pressed","true"):e.addClass("active").attr("aria-pressed","true"),c.hasClass("file-zoom-fullscreen")?b._maximizeZoomDialog():a?b._maximizeZoomDialog():e.removeClass("active").attr("aria-pressed","false")))},_listen:function(){var 
b=this,c=b.$element,d=c.closest("form"),e=b.$container;j(c,"change",a.proxy(b._change,b)),b.showBrowse&&j(b.$btnFile,"click",a.proxy(b._browse,b)),j(d,"reset",a.proxy(b.reset,b)),j(e.find(".fileinput-remove:not([disabled])"),"click",a.proxy(b.clear,b)),j(e.find(".fileinput-cancel"),"click",a.proxy(b.cancel,b)),b._initDragDrop(),b.isUploadable||j(d,"submit",a.proxy(b._submitForm,b)),j(b.$container.find(".fileinput-upload"),"click",a.proxy(b._uploadClick,b)),j(a(window),"resize",function(){b._listenFullScreen(screen.width===window.innerWidth&&screen.height===window.innerHeight)}),j(a(document),"webkitfullscreenchange mozfullscreenchange fullscreenchange MSFullscreenChange",function(){b._listenFullScreen(la())}),b._initClickable()},_initClickable:function(){var c,b=this;b.isClickable&&(c=b.isUploadable?b.$dropZone:b.$preview.find(".file-default-preview"),p(c,"clickable"),c.attr("tabindex",-1),j(c,"click",function(d){var e=a(d.target);e.parents(".file-preview-thumbnails").length&&!e.parents(".file-default-preview").length||(b.$element.trigger("click"),c.blur())}))},_initDragDrop:function(){var b=this,c=b.$dropZone;b.isUploadable&&b.dropZoneEnabled&&b.showPreview&&(j(c,"dragenter dragover",a.proxy(b._zoneDragEnter,b)),j(c,"dragleave",a.proxy(b._zoneDragLeave,b)),j(c,"drop",a.proxy(b._zoneDrop,b)),j(a(document),"dragenter dragover drop",b._zoneDragDropInit))},_zoneDragDropInit:function(a){a.stopPropagation(),a.preventDefault()},_zoneDragEnter:function(b){var c=this,d=a.inArray("Files",b.originalEvent.dataTransfer.types)>-1;return c._zoneDragDropInit(b),c.isDisabled||!d?(b.originalEvent.dataTransfer.effectAllowed="none",void(b.originalEvent.dataTransfer.dropEffect="none")):void p(c.$dropZone,"file-highlighted")},_zoneDragLeave:function(a){var b=this;b._zoneDragDropInit(a),b.isDisabled||b.$dropZone.removeClass("file-highlighted")},_zoneDrop:function(a){var 
b=this;a.preventDefault(),b.isDisabled||da(a.originalEvent.dataTransfer.files)||(b._change(a,"dragdrop"),b.$dropZone.removeClass("file-highlighted"))},_uploadClick:function(a){var d,b=this,c=b.$container.find(".fileinput-upload"),e=!c.hasClass("disabled")&&da(c.attr("disabled"));if(!a||!a.isDefaultPrevented()){if(!b.isUploadable)return void(e&&"submit"!==c.attr("type")&&(d=c.closest("form"),d.length&&d.trigger("submit"),a.preventDefault()));a.preventDefault(),e&&b.upload()}},_submitForm:function(){var a=this,b=a.$element,c=b.get(0).files;return c&&a.minFileCount>0&&a._getFileCount(c.length)<a.minFileCount?(a._noFilesError({}),!1):!a._abort({})},_clearPreview:function(){var a=this,b=a.showUploadedThumbs?a.$preview.find(".file-preview-frame:not(.file-preview-success)"):a.$preview.find(".file-preview-frame");b.remove(),a.$preview.find(".file-preview-frame").length&&a.showPreview||a._resetUpload(),a._validateDefaultPreview()},_initSortable:function(){var d,e,b=this,c=b.$preview;window.KvSortable&&(d=c.find(".file-initial-thumbs"),e={handle:".drag-handle-init",dataIdAttr:"data-preview-id",draggable:".file-preview-initial",onSort:function(c){var d=c.oldIndex,e=c.newIndex;b.initialPreview=na(b.initialPreview,d,e),b.initialPreviewConfig=na(b.initialPreviewConfig,d,e),k.init(b),b._raise("filesorted",{previewId:a(c.item).attr("id"),oldIndex:d,newIndex:e,stack:b.initialPreviewConfig})}},d.data("kvsortable")&&d.kvsortable("destroy"),a.extend(!0,e,b.fileActionSettings.dragSettings),d.kvsortable(e))},_initPreview:function(a){var d,b=this,c=b.initialCaption||"";return k.count(b.id)?(d=k.out(b.id),c=a&&b.initialCaption?b.initialCaption:d.caption,b.$preview.html(d.content),b._setCaption(c),b._initSortable(),void(da(d.content)||b.$container.removeClass("file-input-new"))):(b._clearPreview(),void(a?b._setCaption(c):b._initCaption()))},_getZoomButton:function(a){var b=this,c=b.previewZoomButtonIcons[a],d=b.previewZoomButtonClasses[a],e=' title="'+(b.previewZoomButtonTitles[a]||"")+'" 
',f=e+("close"===a?' data-dismiss="modal" aria-hidden="true"':"");return"fullscreen"!==a&&"borderless"!==a&&"toggleheader"!==a||(f+=' data-toggle="button" aria-pressed="false" autocomplete="off"'),'<button type="button" class="'+d+" btn-"+a+'"'+f+">"+c+"</button>"},_getModalContent:function(){var a=this;return a._getLayoutTemplate("modal").replace(/\{heading}/g,a.msgZoomModalHeading).replace(/\{prev}/g,a._getZoomButton("prev")).replace(/\{next}/g,a._getZoomButton("next")).replace(/\{toggleheader}/g,a._getZoomButton("toggleheader")).replace(/\{fullscreen}/g,a._getZoomButton("fullscreen")).replace(/\{borderless}/g,a._getZoomButton("borderless")).replace(/\{close}/g,a._getZoomButton("close"))},_listenModalEvent:function(a){var b=this,c=b.$modal,d=function(a){return{sourceEvent:a,previewId:c.data("previewId"),modal:c}};c.on(a+".bs.modal",function(e){var f=c.find(".btn-fullscreen"),g=c.find(".btn-borderless");b._raise("filezoom"+a,d(e)),"shown"===a&&(g.removeClass("active").attr("aria-pressed","false"),f.removeClass("active").attr("aria-pressed","false"),c.hasClass("file-zoom-fullscreen")&&(b._maximizeZoomDialog(),la()?f.addClass("active").attr("aria-pressed","true"):g.addClass("active").attr("aria-pressed","true")))})},_initZoom:function(){var d,b=this,e=b._getLayoutTemplate("modalMain"),f="#"+c;b.$modal=a(f),b.$modal&&b.$modal.length||(d=a(document.createElement("div")).html(e).insertAfter(b.$container),b.$modal=a("#"+c).insertBefore(d),d.remove()),b.$modal.html(b._getModalContent()),b._listenModalEvent("show"),b._listenModalEvent("shown"),b._listenModalEvent("hide"),b._listenModalEvent("hidden"),b._listenModalEvent("loaded")},_initZoomButtons:function(){var 
d,e,b=this,c=b.$modal.data("previewId")||"",f=b.$preview.find(".file-preview-frame").toArray(),g=f.length,h=b.$modal.find(".btn-prev"),i=b.$modal.find(".btn-next");g&&(d=a(f[0]),e=a(f[g-1]),h.removeAttr("disabled"),i.removeAttr("disabled"),d.length&&d.attr("id")===c&&h.attr("disabled",!0),e.length&&e.attr("id")===c&&i.attr("disabled",!0))},_maximizeZoomDialog:function(){var b=this,c=b.$modal,d=c.find(".modal-header:visible"),e=c.find(".modal-footer:visible"),f=c.find(".modal-body"),g=a(window).height(),h=0;c.addClass("file-zoom-fullscreen"),d&&d.length&&(g-=d.outerHeight(!0)),e&&e.length&&(g-=e.outerHeight(!0)),f&&f.length&&(h=f.outerHeight(!0)-f.height(),g-=h),c.find(".kv-zoom-body").height(g)},_resizeZoomDialog:function(a){var b=this,c=b.$modal,d=c.find(".btn-fullscreen"),e=c.find(".btn-borderless");if(c.hasClass("file-zoom-fullscreen"))ma(!1),a?d.hasClass("active")||(c.removeClass("file-zoom-fullscreen"),b._resizeZoomDialog(!0),e.hasClass("active")&&e.removeClass("active").attr("aria-pressed","false")):d.hasClass("active")?d.removeClass("active").attr("aria-pressed","false"):(c.removeClass("file-zoom-fullscreen"),b.$modal.find(".kv-zoom-body").css("height",b.zoomModalHeight));else{if(!a)return void b._maximizeZoomDialog();ma(!0)}c.focus()},_setZoomContent:function(b,c){var 
e,f,g,h,i,k,l,r,d=this,m=b.attr("id"),n=d.$modal,o=n.find(".btn-prev"),q=n.find(".btn-next"),s=n.find(".btn-fullscreen"),t=n.find(".btn-borderless"),u=n.find(".btn-toggleheader");f=b.data("template")||"generic",e=b.find(".kv-file-content"),g=e.length?e.html():"",h=b.find(".file-footer-caption").text()||"",n.find(".kv-zoom-title").html(h),i=n.find(".kv-zoom-body"),c?(r=i.clone().insertAfter(i),i.html(g).hide(),r.fadeOut("fast",function(){i.fadeIn("fast"),r.remove()})):i.html(g),l=d.previewZoomSettings[f],l&&(k=i.find(".kv-preview-data"),p(k,"file-zoom-detail"),a.each(l,function(a,b){k.css(a,b),(k.attr("width")&&"width"===a||k.attr("height")&&"height"===a)&&k.removeAttr(a)})),n.data("previewId",m),j(o,"click",function(){d._zoomSlideShow("prev",m)}),j(q,"click",function(){d._zoomSlideShow("next",m)}),j(s,"click",function(){d._resizeZoomDialog(!0)}),j(t,"click",function(){d._resizeZoomDialog(!1)}),j(u,"click",function(){var c,a=n.find(".modal-header"),b=n.find(".modal-body .floating-buttons"),e=a.find(".kv-zoom-actions"),f=function(b){var c=d.$modal.find(".kv-zoom-body"),e=d.zoomModalHeight;n.hasClass("file-zoom-fullscreen")&&(e=c.outerHeight(!0),b||(e-=a.outerHeight(!0))),c.css("height",b?e+b:e)};a.is(":visible")?(c=a.outerHeight(!0),a.slideUp("slow",function(){e.find(".btn").appendTo(b),f(c)})):(b.find(".btn").appendTo(e),a.slideDown("slow",function(){f()})),n.focus()}),j(n,"keydown",function(a){var b=a.which||a.keyCode;37!==b||o.attr("disabled")||d._zoomSlideShow("prev",m),39!==b||q.attr("disabled")||d._zoomSlideShow("next",m)})},_zoomPreview:function(a){var c,b=this;if(!a.length)throw"Cannot zoom to detailed preview!";b.$modal.html(b._getModalContent()),c=a.closest(".file-preview-frame"),b._setZoomContent(c),b.$modal.modal("show"),b._initZoomButtons()},_zoomSlideShow:function(b,c){var f,g,j,d=this,e=d.$modal.find(".kv-zoom-actions 
.btn-"+b),h=d.$preview.find(".file-preview-frame").toArray(),i=h.length;if(!e.attr("disabled")){for(g=0;g<i;g++)if(a(h[g]).attr("id")===c){j="prev"===b?g-1:g+1;break}j<0||j>=i||!h[j]||(f=a(h[j]),f.length&&d._setZoomContent(f,!0),d._initZoomButtons(),d._raise("filezoom"+b,{previewId:c,modal:d.$modal}))}},_initZoomButton:function(){var b=this;b.$preview.find(".kv-file-zoom").each(function(){var c=a(this);j(c,"click",function(){b._zoomPreview(c)})})},_initPreviewActions:function(){var b=this,c=b.deleteExtraData||{},d=function(){var a=b.isUploadable?k.count(b.id):b.$element.get(0).files.length;0!==b.$preview.find(".kv-file-remove").length||a||(b.reset(),b.initialCaption="")};b._initZoomButton(),b.$preview.find(".kv-file-remove").each(function(){var e=a(this),f=e.data("url")||b.deleteUrl,g=e.data("key");if(!da(f)&&void 0!==g){var l,m,o,q,h=e.closest(".file-preview-frame"),i=k.data[b.id],n=h.data("fileindex");n=parseInt(n.replace("init_","")),o=da(i.config)&&da(i.config[n])?null:i.config[n],q=da(o)||da(o.extra)?c:o.extra,"function"==typeof q&&(q=q()),m={id:e.attr("id"),key:g,extra:q},l=a.extend(!0,{},{url:f,type:"POST",dataType:"json",data:a.extend(!0,{},{key:g},q),beforeSend:function(a){b.ajaxAborted=!1,b._raise("filepredelete",[g,a,q]),b.ajaxAborted?a.abort():(p(h,"file-uploading"),p(e,"disabled"))},success:function(a,c,f){var i,j;return da(a)||da(a.error)?(k.unset(b.id,n),i=k.count(b.id),j=i>0?b._getMsgSelected(i):"",b._raise("filedeleted",[g,f,q]),b._setCaption(j),h.removeClass("file-uploading").addClass("file-deleted"),void h.fadeOut("slow",function(){b._clearObjects(h),h.remove(),d(),i||0!==b.getFileStack().length||(b._setCaption(""),b.reset())})):(m.jqXHR=f,m.response=a,b._showError(a.error,m,"filedeleteerror"),h.removeClass("file-uploading"),e.removeClass("disabled"),void d())},error:function(a,c,e){var f=b._parseError(a,e);
m.jqXHR=a,m.response={},b._showError(f,m,"filedeleteerror"),h.removeClass("file-uploading"),d()}},b.ajaxDeleteSettings),j(e,"click",function(){return!!b._validateMinCount()&&void a.ajax(l)})}})},_clearObjects:function(b){b.find("video audio").each(function(){this.pause(),a(this).remove()}),b.find("img object div").each(function(){a(this).remove()})},_clearFileInput:function(){var d,e,f,b=this,c=b.$element;da(c.val())||(b.isIE9||b.isIE10?(d=c.closest("form"),e=a(document.createElement("form")),f=a(document.createElement("div")),c.before(f),d.length?d.after(e):f.after(e),e.append(c).trigger("reset"),f.before(c).remove(),e.remove()):c.val(""),b.fileInputCleared=!0)},_resetUpload:function(){var a=this;a.uploadCache={content:[],config:[],tags:[],append:!0},a.uploadCount=0,a.uploadStatus={},a.uploadLog=[],a.uploadAsyncCount=0,a.loadedImages=[],a.totalImagesCount=0,a.$btnUpload.removeAttr("disabled"),a._setProgress(0),p(a.$progress,"hide"),a._resetErrors(!1),a.ajaxAborted=!1,a.ajaxRequests=[],a._resetCanvas()},_resetCanvas:function(){var a=this;a.canvas&&a.imageCanvasContext&&a.imageCanvasContext.clearRect(0,0,a.canvas.width,a.canvas.height)},_hasInitialPreview:function(){var a=this;return!a.overwriteInitial&&k.count(a.id)},_resetPreview:function(){var b,c,a=this;k.count(a.id)?(b=k.out(a.id),a.$preview.html(b.content),c=a.initialCaption?a.initialCaption:b.caption,a._setCaption(c)):(a._clearPreview(),a._initCaption()),a.showPreview&&(a._initZoom(),a._initSortable())},_clearDefaultPreview:function(){var a=this;a.$preview.find(".file-default-preview").remove()},_validateDefaultPreview:function(){var a=this;a.showPreview&&!da(a.defaultPreviewContent)&&(a.$preview.html('<div class="file-default-preview">'+a.defaultPreviewContent+"</div>"),a.$container.removeClass("file-input-new"),a._initClickable())},_resetPreviewThumbs:function(a){var c,b=this;return a?(b._clearPreview(),void 
b.clearStack()):void(b._hasInitialPreview()?(c=k.out(b.id),b.$preview.html(c.content),b._setCaption(c.caption),b._initPreviewActions()):b._clearPreview())},_getLayoutTemplate:function(a){var b=this,c=fa(a,b.layoutTemplates,Y[a]);return da(b.customLayoutTags)?c:ia(c,b.customLayoutTags)},_getPreviewTemplate:function(a){var b=this,c=fa(a,b.previewTemplates,Z[a]);return da(b.customPreviewTags)?c:ia(c,b.customPreviewTags)},_getOutData:function(a,b,c){var d=this;return a=a||{},b=b||{},c=c||d.filestack.slice(0)||{},{form:d.formdata,files:c,filenames:d.filenames,filescount:d.getFilesCount(),extra:d._getExtraData(),response:b,reader:d.reader,jqXHR:a}},_getMsgSelected:function(a){var b=this,c=1===a?b.fileSingle:b.filePlural;return a>0?b.msgSelected.replace("{n}",a).replace("{files}",c):b.msgNoFilesSelected},_getThumbs:function(a){return a=a||"",this.$preview.find(".file-preview-frame:not(.file-preview-initial)"+a)},_getExtraData:function(a,b){var c=this,d=c.uploadExtraData;return"function"==typeof c.uploadExtraData&&(d=c.uploadExtraData(a,b)),d},_initXhr:function(a,b,c){var d=this;return a.upload&&a.upload.addEventListener("progress",function(a){var e=0,f=a.total,g=a.loaded||a.position;a.lengthComputable&&(e=Math.floor(g/f*100)),b?d._setAsyncUploadStatus(b,e,c):d._setProgress(e)},!1),a},_ajaxSubmit:function(b,c,d,e,f,g){var i,h=this;h._raise("filepreajax",[f,g]),h._uploadExtra(f,g),i=a.extend(!0,{},{xhr:function(){var b=a.ajaxSettings.xhr();return h._initXhr(b,f,h.getFileStack().length)},url:h.uploadUrl,type:"POST",dataType:"json",data:h.formdata,cache:!1,processData:!1,contentType:!1,beforeSend:b,success:c,complete:d,error:e},h.ajaxSettings),h.ajaxRequests.push(a.ajax(i))},_initUploadSuccess:function(b,c,d){var f,g,h,i,j,l,m,n,e=this,o=function(a,b){e[a]instanceof Array||(e[a]=[]),b&&b.length&&(e[a]=e[a].concat(b))};e.showPreview&&"object"==typeof b&&!a.isEmptyObject(b)&&void 
0!==b.initialPreview&&b.initialPreview.length>0&&(e.hasInitData=!0,j=b.initialPreview||[],l=b.initialPreviewConfig||[],m=b.initialPreviewThumbTags||[],f=!(void 0!==b.append&&!b.append),j.length>0&&!ea(j)&&(j=j.split(e.initialPreviewDelimiter)),e.overwriteInitial=!1,o("initialPreview",j),o("initialPreviewConfig",l),o("initialPreviewThumbTags",m),void 0!==c?d?(n=c.attr("data-fileindex"),e.uploadCache.content[n]=j[0],e.uploadCache.config[n]=l[0]||[],e.uploadCache.tags[n]=m[0]||[],e.uploadCache.append=f):(h=k.add(e.id,j,l[0],m[0],f),g=k.get(e.id,h,!1),i=a(g).hide(),c.after(i).fadeOut("slow",function(){i.fadeIn("slow").css("display:inline-block"),e._initPreviewActions(),e._clearFileInput(),c.remove()})):(k.set(e.id,j,l,m,f),e._initPreview(),e._initPreviewActions()))},_initSuccessThumbs:function(){var b=this;b.showPreview&&b._getThumbs(".file-preview-success").each(function(){var c=a(this),d=c.find(".kv-file-remove");d.removeAttr("disabled"),j(d,"click",function(){var a=b._raise("filesuccessremove",[c.attr("id"),c.data("fileindex")]);ja(c),a!==!1&&c.fadeOut("slow",function(){c.remove(),b.$preview.find(".file-preview-frame").length||b.reset()})})})},_checkAsyncComplete:function(){var c,d,b=this;for(d=0;d<b.filestack.length;d++)if(b.filestack[d]&&(c=b.previewInitId+"-"+d,a.inArray(c,b.uploadLog)===-1))return!1;return b.uploadAsyncCount===b.uploadLog.length},_uploadExtra:function(b,c){var d=this,e=d._getExtraData(b,c);0!==e.length&&a.each(e,function(a,b){d.formdata.append(a,b)})},_uploadSingle:function(b,c,d){var h,j,l,m,n,q,r,s,t,u,e=this,f=e.getFileStack().length,g=new FormData,i=e.previewInitId+"-"+b,o=e.filestack.length>0||!a.isEmptyObject(e.uploadExtraData),v={id:i,index:b};e.formdata=g,e.showPreview&&(j=a("#"+i+":not(.file-preview-initial)"),m=j.find(".kv-file-upload"),n=j.find(".kv-file-remove"),a("#"+i).find(".file-thumb-progress").removeClass("hide")),0===f||!o||m&&m.hasClass("disabled")||e._abort(v)||(u=function(a,b){e.updateStack(a,void 
0),e.uploadLog.push(b),e._checkAsyncComplete()&&(e.fileBatchCompleted=!0)},l=function(){var a=e.uploadCache;e.fileBatchCompleted&&setTimeout(function(){e.showPreview&&(k.set(e.id,a.content,a.config,a.tags,a.append),e.hasInitData&&(e._initPreview(),e._initPreviewActions())),e.unlock(),e._clearFileInput(),e._raise("filebatchuploadcomplete",[e.filestack,e._getExtraData()]),e.uploadCount=0,e.uploadStatus={},e.uploadLog=[],e._setProgress(101)},100)},q=function(c){h=e._getOutData(c),e.fileBatchCompleted=!1,e.showPreview&&(j.hasClass("file-preview-success")||(e._setThumbStatus(j,"Loading"),p(j,"file-uploading")),m.attr("disabled",!0),n.attr("disabled",!0)),d||e.lock(),e._raise("filepreupload",[h,i,b]),a.extend(!0,v,h),e._abort(v)&&(c.abort(),e._setProgressCancelled())},r=function(c,f,g){h=e._getOutData(g,c),a.extend(!0,v,h),setTimeout(function(){da(c)||da(c.error)?(e.showPreview&&(e._setThumbStatus(j,"Success"),m.hide(),e._initUploadSuccess(c,j,d)),e._raise("fileuploaded",[h,j.attr("id"),b]),d?u(b,i):e.updateStack(b,void 0)):(e._showUploadError(c.error,v),e._setPreviewError(j,b),d&&u(b,i))},100)},s=function(){setTimeout(function(){e.showPreview&&(m.removeAttr("disabled"),n.removeAttr("disabled"),j.removeClass("file-uploading"),e._setProgress(101,a("#"+i).find(".file-thumb-progress"))),d?l():(e.unlock(!1),e._clearFileInput()),e._initSuccessThumbs()},100)},t=function(f,g,h){var k=e._parseError(f,h,d?c[b].name:null);setTimeout(function(){d&&u(b,i),e.uploadStatus[i]=100,e._setPreviewError(j,b),a.extend(!0,v,e._getOutData(f)),e._showUploadError(k,v)},100)},g.append(e.uploadFileAttr,c[b],e.filenames[b]),g.append("file_id",b),e._ajaxSubmit(q,r,s,t,i,b))},_uploadBatch:function(){var f,g,h,i,k,b=this,c=b.filestack,d=c.length,e={},j=b.filestack.length>0||!a.isEmptyObject(b.uploadExtraData);b.formdata=new FormData,0!==d&&j&&!b._abort(e)&&(k=function(){a.each(c,function(a){b.updateStack(a,void 0)}),b._clearFileInput()},f=function(c){b.lock();var 
d=b._getOutData(c);b.showPreview&&b._getThumbs().each(function(){var c=a(this),d=c.find(".kv-file-upload"),e=c.find(".kv-file-remove");c.hasClass("file-preview-success")||(b._setThumbStatus(c,"Loading"),p(c,"file-uploading")),d.attr("disabled",!0),e.attr("disabled",!0)}),b._raise("filebatchpreupload",[d]),b._abort(d)&&(c.abort(),b._setProgressCancelled())},g=function(c,d,e){var f=b._getOutData(e,c),g=b._getThumbs(":not(.file-preview-error)"),h=0,i=da(c)||da(c.errorkeys)?[]:c.errorkeys;da(c)||da(c.error)?(b._raise("filebatchuploadsuccess",[f]),k(),b.showPreview?(g.each(function(){var c=a(this),d=c.find(".kv-file-upload");c.find(".kv-file-upload").hide(),b._setThumbStatus(c,"Success"),c.removeClass("file-uploading"),d.removeAttr("disabled")}),b._initUploadSuccess(c)):b.reset()):(b.showPreview&&(g.each(function(){var c=a(this),d=c.find(".kv-file-remove"),e=c.find(".kv-file-upload");return c.removeClass("file-uploading"),e.removeAttr("disabled"),d.removeAttr("disabled"),0===i.length?void b._setPreviewError(c):(a.inArray(h,i)!==-1?b._setPreviewError(c):(c.find(".kv-file-upload").hide(),b._setThumbStatus(c,"Success"),b.updateStack(h,void 0)),void h++)}),b._initUploadSuccess(c)),b._showUploadError(c.error,f,"filebatchuploaderror"))},i=function(){b._setProgress(101),b.unlock(),b._initSuccessThumbs(),b._clearFileInput(),b._raise("filebatchuploadcomplete",[b.filestack,b._getExtraData()])},h=function(c,e,f){var g=b._getOutData(c),h=b._parseError(c,f);b._showUploadError(h,g,"filebatchuploaderror"),b.uploadFileCount=d-1,b.showPreview&&(b._getThumbs().each(function(){var c=a(this),d=c.attr("data-fileindex");c.removeClass("file-uploading"),void 0!==b.filestack[d]&&b._setPreviewError(c)}),b._getThumbs().removeClass("file-uploading"),b._getThumbs(" .kv-file-upload").removeAttr("disabled"),b._getThumbs(" 
.kv-file-delete").removeAttr("disabled"))},a.each(c,function(a,d){da(c[a])||b.formdata.append(b.uploadFileAttr,d,b.filenames[a])}),b._ajaxSubmit(f,g,i,h))},_uploadExtraOnly:function(){var c,d,e,f,a=this,b={};a.formdata=new FormData,a._abort(b)||(c=function(c){a.lock();var d=a._getOutData(c);a._raise("filebatchpreupload",[d]),a._setProgress(50),b.data=d,b.xhr=c,a._abort(b)&&(c.abort(),a._setProgressCancelled())},d=function(b,c,d){var e=a._getOutData(d,b);da(b)||da(b.error)?(a._raise("filebatchuploadsuccess",[e]),a._clearFileInput(),a._initUploadSuccess(b)):a._showUploadError(b.error,e,"filebatchuploaderror")},e=function(){a._setProgress(101),a.unlock(),a._clearFileInput(),a._raise("filebatchuploadcomplete",[a.filestack,a._getExtraData()])},f=function(c,d,e){var f=a._getOutData(c),g=a._parseError(c,e);b.data=f,a._showUploadError(g,f,"filebatchuploaderror")},a._ajaxSubmit(c,d,e,f))},_initFileActions:function(){var b=this;b.showPreview&&(b._initZoomButton(),b.$preview.find(".kv-file-remove").each(function(){var e,h,i,l,c=a(this),d=c.closest(".file-preview-frame"),f=d.attr("id"),g=d.attr("data-fileindex");j(c,"click",function(){return l=b._raise("filepreremove",[f,g]),!(l===!1||!b._validateMinCount())&&(e=d.hasClass("file-preview-error"),ja(d),void d.fadeOut("slow",function(){b.updateStack(g,void 0),b._clearObjects(d),d.remove(),f&&e&&b.$errorContainer.find('li[data-file-id="'+f+'"]').fadeOut("fast",function(){a(this).remove(),b._errorsExist()||b._resetErrors()}),b._clearFileInput();var c=b.getFileStack(!0),j=k.count(b.id),l=c.length,m=b.showPreview&&b.$preview.find(".file-preview-frame").length;0!==l||0!==j||m?(h=j+l,i=h>1?b._getMsgSelected(h):c[0]?b._getFileNames()[0]:"",b._setCaption(i)):b.reset(),b._raise("fileremoved",[f,g])}))})}),b.$preview.find(".kv-file-upload").each(function(){var c=a(this);j(c,"click",function(){var 
a=c.closest(".file-preview-frame"),d=a.attr("data-fileindex");a.hasClass("file-preview-error")||b._uploadSingle(d,b.filestack,!1)})}))},_hideFileIcon:function(){this.overwriteInitial&&this.$captionContainer.find(".kv-caption-icon").hide()},_showFileIcon:function(){this.$captionContainer.find(".kv-caption-icon").show()},_getSize:function(a){var b=parseFloat(a);if(null===a||isNaN(b))return"";var d,f,g,c=this,e=c.fileSizeGetter;return"function"==typeof e?g=e(a):(d=Math.floor(Math.log(b)/Math.log(1024)),f=["B","KB","MB","GB","TB","PB","EB","ZB","YB"],g=1*(b/Math.pow(1024,d)).toFixed(2)+" "+f[d]),c._getLayoutTemplate("size").replace("{sizeText}",g)},_generatePreviewTemplate:function(a,b,c,d,e,f,g,h,i,j){var m,n,k=this,l=k._getPreviewTemplate(a),o=h||"",p=fa(a,k.previewSettings,ba[a]),q=k.slug(c),r=i||k._renderFileFooter(q,g,p.width,f);return j=j||e.slice(e.lastIndexOf("-")+1),l=k._parseFilePreviewIcon(l,c),"text"===a||"html"===a?(n="text"===a?ha(b):b,m=l.replace(/\{previewId}/g,e).replace(/\{caption}/g,q).replace(/\{width}/g,p.width).replace(/\{height}/g,p.height).replace(/\{frameClass}/g,o).replace(/\{cat}/g,d).replace(/\{footer}/g,r).replace(/\{fileindex}/g,j).replace(/\{data}/g,n).replace(/\{template}/g,a)):m=l.replace(/\{previewId}/g,e).replace(/\{caption}/g,q).replace(/\{frameClass}/g,o).replace(/\{type}/g,d).replace(/\{fileindex}/g,j).replace(/\{width}/g,p.width).replace(/\{height}/g,p.height).replace(/\{footer}/g,r).replace(/\{data}/g,b).replace(/\{template}/g,a),m},_previewDefault:function(b,c,d){var e=this,f=e.$preview,h=f.find(".file-live-thumbs");if(e.showPreview){var k,i=b?b.name:"",j=b?b.type:"",l=d===!0&&!e.isUploadable,m=g.createObjectURL(b);e._clearDefaultPreview(),k=e._generatePreviewTemplate("other",m,i,j,c,l,b.size),h.length||(h=a(document.createElement("div")).addClass("file-live-thumbs").appendTo(f)),h.append("\n"+k),d===!0&&e.isUploadable&&e._setThumbStatus(a("#"+c),"Error")}},_previewFile:function(b,c,d,e,f){if(this.showPreview){var 
q,g=this,h=g._parseFileType(c),i=c?c.name:"",j=g.slug(i),k=g.allowedPreviewTypes,l=g.allowedPreviewMimeTypes,m=g.$preview,n=k&&k.indexOf(h)>=0,o=m.find(".file-live-thumbs"),p="text"===h||"html"===h||"image"===h?d.target.result:f,r=l&&l.indexOf(c.type)!==-1;o.length||(o=a(document.createElement("div")).addClass("file-live-thumbs").appendTo(m)),"html"===h&&g.purifyHtml&&window.DOMPurify&&(p=window.DOMPurify.sanitize(p)),n||r?(q=g._generatePreviewTemplate(h,p,i,c.type,e,!1,c.size),g._clearDefaultPreview(),o.append("\n"+q),g._validateImage(b,e,j,c.type)):g._previewDefault(c,e),g._initSortable()}},_slugDefault:function(a){return da(a)?"":String(a).replace(/[\-\[\]\/\{}:;#%=\(\)\*\+\?\\\^\$\|<>&"']/g,"_")},_readFiles:function(b){this.reader=new FileReader;var q,c=this,d=c.$element,e=c.$preview,f=c.reader,i=c.$previewContainer,j=c.$previewStatus,k=c.msgLoading,l=c.msgProgress,m=c.previewInitId,n=b.length,o=c.fileTypeSettings,p=c.filestack.length,r=c.maxFilePreviewSize&&parseFloat(c.maxFilePreviewSize),s=e.length&&(!r||isNaN(r)),t=function(d,e,f,g){var h=a.extend(!0,{},c._getOutData({},{},b),{id:f,index:g}),i={id:f,index:g,file:e,files:b};return c._previewDefault(e,f,!0),c.isUploadable&&c.addToStack(void 0),setTimeout(function(){q(g+1)},100),c._initFileActions(),c.removeFromPreviewOnError&&a("#"+f).remove(),c.isUploadable?c._showUploadError(d,h):c._showError(d,i)};c.loadedImages=[],c.totalImagesCount=0,a.each(b,function(a,b){var d=c.fileTypeSettings.image||ca.image;d&&d(b.type)&&c.totalImagesCount++}),q=function(a){if(da(d.attr("multiple"))&&(n=1),a>=n)return c.isUploadable&&c.filestack.length>0?c._raise("filebatchselected",[c.getFileStack()]):c._raise("filebatchselected",[b]),i.removeClass("file-thumb-loading"),void j.html("");var w,x,B,F,G,H,I,u=p+a,v=m+"-"+u,y=b[a],z=c.slug(y.name),A=(y.size||0)/1e3,C="",D=g.createObjectURL(y),E=0,J=c.allowedFileTypes,K=da(J)?"":J.join(", "),L=c.allowedFileExtensions,M=da(L)?"":L.join(", ");if(da(L)||(C=new 
RegExp("\\.("+L.join("|")+")$","i")),A=A.toFixed(2),c.maxFileSize>0&&A>c.maxFileSize)return G=c.msgSizeTooLarge.replace("{name}",z).replace("{size}",A).replace("{maxSize}",c.maxFileSize),void(c.isError=t(G,y,v,a));if(!da(J)&&ea(J)){for(F=0;F<J.length;F+=1)H=J[F],B=o[H],I=void 0!==B&&B(y.type,z),E+=da(I)?0:I.length;if(0===E)return G=c.msgInvalidFileType.replace("{name}",z).replace("{types}",K),void(c.isError=t(G,y,v,a))}return 0!==E||da(L)||!ea(L)||da(C)||(I=h(z,C),E+=da(I)?0:I.length,0!==E)?c.showPreview?!s&&A>r?(c.addToStack(y),i.addClass("file-thumb-loading"),c._previewDefault(y,v),c._initFileActions(),c._updateFileDetails(n),void q(a+1)):(e.length&&void 0!==FileReader?(j.html(k.replace("{index}",a+1).replace("{files}",n)),i.addClass("file-thumb-loading"),f.onerror=function(a){c._errorHandler(a,z)},f.onload=function(b){c._previewFile(a,y,b,v,D),c._initFileActions()},f.onloadend=function(){G=l.replace("{index}",a+1).replace("{files}",n).replace("{percent}",50).replace("{name}",z),setTimeout(function(){j.html(G),c._updateFileDetails(n),q(a+1)},100),c._raise("fileloaded",[y,v,a,f])},f.onprogress=function(b){if(b.lengthComputable){var c=b.loaded/b.total*100,d=Math.ceil(c);G=l.replace("{index}",a+1).replace("{files}",n).replace("{percent}",d).replace("{name}",z),setTimeout(function(){j.html(G)},100)}},w=fa("text",o,ca.text),x=fa("image",o,ca.image),w(y.type,z)?f.readAsText(y,c.textEncoding):x(y.type,z)?f.readAsDataURL(y):f.readAsArrayBuffer(y)):(c._previewDefault(y,v),setTimeout(function(){q(a+1),c._updateFileDetails(n)},100),c._raise("fileloaded",[y,v,a,f])),void c.addToStack(y)):(c.addToStack(y),setTimeout(function(){q(a+1)},100),void c._raise("fileloaded",[y,v,a,f])):(G=c.msgInvalidFileExtension.replace("{name}",z).replace("{extensions}",M),void(c.isError=t(G,y,v,a)))},q(0),c._updateFileDetails(n,!1)},_updateFileDetails:function(a){var 
b=this,c=b.$element,d=b.getFileStack(),e=i(9)&&ka(c.val())||c[0].files[0]&&c[0].files[0].name||d.length&&d[0].name||"",f=b.slug(e),g=b.isUploadable?d.length:a,h=k.count(b.id)+g,j=g>1?b._getMsgSelected(h):f;b.isError?(b.$previewContainer.removeClass("file-thumb-loading"),b.$previewStatus.html(""),b.$captionContainer.find(".kv-caption-icon").hide()):b._showFileIcon(),b._setCaption(j,b.isError),b.$container.removeClass("file-input-new file-input-ajax-new"),1===arguments.length&&b._raise("fileselect",[a,f]),k.count(b.id)&&b._initPreviewActions()},_setThumbStatus:function(a,b){var c=this;if(c.showPreview){var d="indicator"+b,e=d+"Title",f="file-preview-"+b.toLowerCase(),g=a.find(".file-upload-indicator"),h=c.fileActionSettings;a.removeClass("file-preview-success file-preview-error file-preview-loading"),"Error"===b&&a.find(".kv-file-upload").attr("disabled",!0),"Success"===b&&(a.find(".file-drag-handle").remove(),g.css("margin-left",0)),g.html(h[d]),g.attr("title",h[e]),a.addClass(f)}},_setProgressCancelled:function(){var a=this;a._setProgress(101,a.$progress,a.msgCancelled)},_setProgress:function(a,b,c){var d=this,e=Math.min(a,100),f=e<100?d.progressTemplate:c?d.progressErrorTemplate:a<=100?d.progressTemplate:d.progressCompleteTemplate,g=d.progressUploadThreshold;if(b=b||d.$progress,!da(f)){if(g&&e>g&&a<=100){var h=f.replace("{percent}",g).replace("{percent}",g).replace("{percent}%",d.msgUploadThreshold);b.html(h)}else b.html(f.replace(/\{percent}/g,e));c&&b.find('[role="progressbar"]').html(c)}},_setFileDropZoneTitle:function(){var d,a=this,b=a.$container.find(".file-drop-zone"),c=a.dropZoneTitle;a.isClickable&&(d=da(a.$element.attr("multiple"))?a.fileSingle:a.filePlural,c+=a.dropZoneClickTitle.replace("{files}",d)),b.find("."+a.dropZoneTitleClass).remove(),a.isUploadable&&a.showPreview&&0!==b.length&&!(a.getFileStack().length>0)&&a.dropZoneEnabled&&(0===b.find(".file-preview-frame").length&&da(a.defaultPreviewContent)&&b.prepend('<div 
class="'+a.dropZoneTitleClass+'">'+c+"</div>"),a.$container.removeClass("file-input-new"),p(a.$container,"file-input-ajax-new"))},_setAsyncUploadStatus:function(b,c,d){var e=this,f=0;e._setProgress(c,a("#"+b).find(".file-thumb-progress")),e.uploadStatus[b]=c,a.each(e.uploadStatus,function(a,b){f+=b}),e._setProgress(Math.floor(f/d))},_validateMinCount:function(){var a=this,b=a.isUploadable?a.getFileStack().length:a.$element.get(0).files.length;return!(a.validateInitialCount&&a.minFileCount>0&&a._getFileCount(b-1)<a.minFileCount)||(a._noFilesError({}),!1)},_getFileCount:function(a){var b=this,c=0;return b.validateInitialCount&&!b.overwriteInitial&&(c=k.count(b.id),a+=c),a},_getFileName:function(a){return a&&a.name?this.slug(a.name):void 0},_getFileNames:function(a){var b=this;return b.filenames.filter(function(b){return a?void 0!==b:void 0!==b&&null!==b})},_setPreviewError:function(a,b,c){var d=this;void 0!==b&&d.updateStack(b,c),d.removeFromPreviewOnError?a.remove():d._setThumbStatus(a,"Error")},_checkDimensions:function(a,b,c,d,e,f,g){var i,j,m,n,h=this,k="Small"===b?"min":"max",l=h[k+"Image"+f];!da(l)&&c.length&&(m=c[0],j="Width"===f?m.naturalWidth||m.width:m.naturalHeight||m.height,n="Small"===b?j>=l:j<=l,n||(i=h["msgImage"+f+b].replace("{name}",e).replace("{size}",l),h._showUploadError(i,g),h._setPreviewError(d,a,null)))},_validateImage:function(a,b,c,d){var h,i,k,e=this,f=e.$preview,l=f.find("#"+b),m=l.find("img");c=c||"Untitled",m.length&&j(m,"load",function(){i=l.width(),k=f.width(),i>k&&(m.css("width","100%"),l.css("width","97%")),h={ind:a,id:b},e._checkDimensions(a,"Small",m,l,c,"Width",h),e._checkDimensions(a,"Small",m,l,c,"Height",h),e.resizeImage||(e._checkDimensions(a,"Large",m,l,c,"Width",h),e._checkDimensions(a,"Large",m,l,c,"Height",h)),e._raise("fileimageloaded",[b]),e.loadedImages.push({ind:a,img:m,thumb:l,pid:b,typ:d}),e._validateAllImages(),g.revokeObjectURL(m.attr("src"))})},_validateAllImages:function(){var 
b,c,d,e,f,g,i,a=this,h={};if(a.loadedImages.length===a.totalImagesCount&&(a._raise("fileimagesloaded"),a.resizeImage)){for(i=a.isUploadable?a._showUploadError:a._showError,b=0;b<a.loadedImages.length;b++)c=a.loadedImages[b],d=c.img,e=c.thumb,f=c.pid,g=c.ind,h={id:f,index:g},a._getResizedImage(d[0],c.typ,f,g)||(i(a.msgImageResizeError,h,"fileimageresizeerror"),a._setPreviewError(e,g));a._raise("fileimagesresized")}},_getResizedImage:function(a,b,c,d){var l,m,e=this,f=a.naturalWidth,g=a.naturalHeight,h=1,i=e.maxImageWidth||f,j=e.maxImageHeight||g,k=f&&g,n=e.imageCanvas,o=e.imageCanvasContext;if(!k)return!1;if(f===i&&g===j)return!0;b=b||e.resizeDefaultImageType,l=f>i,m=g>j,h="width"===e.resizePreference?l?i/f:m?j/g:1:m?j/g:l?i/f:1,e._resetCanvas(),f*=h,g*=h,n.width=f,n.height=g;try{return o.drawImage(a,0,0,f,g),n.toBlob(function(a){e._raise("fileimageresized",[c,d]),e.filestack[d]=a},b,e.resizeQuality),!0}catch(a){return!1}},_initBrowse:function(a){var b=this;b.showBrowse?(b.$btnFile=a.find(".btn-file"),b.$btnFile.append(b.$element)):b.$element.hide()},_initCaption:function(){var a=this,b=a.initialCaption||"";return a.overwriteInitial||da(b)?(a.$caption.html(""),!1):(a._setCaption(b),!0)},_setCaption:function(b,c){var e,f,g,h,d=this,i=d.getFileStack();if(d.$caption.length){if(c)e=a("<div>"+d.msgValidationError+"</div>").text(),g=i.length,h=g?1===g&&i[0]?d._getFileNames()[0]:d._getMsgSelected(g):d._getMsgSelected(d.msgNo),f='<span class="'+d.msgValidationErrorClass+'">'+d.msgValidationErrorIcon+(da(b)?h:b)+"</span>";else{if(da(b))return;e=a("<div>"+b+"</div>").text(),f=d._getLayoutTemplate("fileIcon")+e}d.$caption.html(f),d.$caption.attr("title",e),d.$captionContainer.find(".file-caption-ellipsis").attr("title",e)}},_createContainer:function(){var b=this,c=a(document.createElement("div")).attr({class:"file-input file-input-new"}).html(b._renderMain());return b.$element.before(c),b._initBrowse(c),b.theme&&c.addClass("theme-"+b.theme),c},_refreshContainer:function(){var 
a=this,b=a.$container;b.before(a.$element),b.html(a._renderMain()),a._initBrowse(b)},_renderMain:function(){var a=this,b=a.isUploadable&&a.dropZoneEnabled?" file-drop-zone":"file-drop-disabled",c=a.showClose?a._getLayoutTemplate("close"):"",d=a.showPreview?a._getLayoutTemplate("preview").replace(/\{class}/g,a.previewClass).replace(/\{dropClass}/g,b):"",e=a.isDisabled?a.captionClass+" file-caption-disabled":a.captionClass,f=a.captionTemplate.replace(/\{class}/g,e+" kv-fileinput-caption");return a.mainTemplate.replace(/\{class}/g,a.mainClass+(!a.showBrowse&&a.showCaption?" no-browse":"")).replace(/\{preview}/g,d).replace(/\{close}/g,c).replace(/\{caption}/g,f).replace(/\{upload}/g,a._renderButton("upload")).replace(/\{remove}/g,a._renderButton("remove")).replace(/\{cancel}/g,a._renderButton("cancel")).replace(/\{browse}/g,a._renderButton("browse"))},_renderButton:function(a){var b=this,c=b._getLayoutTemplate("btnDefault"),d=b[a+"Class"],e=b[a+"Title"],f=b[a+"Icon"],g=b[a+"Label"],h=b.isDisabled?" disabled":"",i="button";switch(a){case"remove":if(!b.showRemove)return"";break;case"cancel":if(!b.showCancel)return"";d+=" hide";break;case"upload":if(!b.showUpload)return"";b.isUploadable&&!b.isDisabled?c=b._getLayoutTemplate("btnLink").replace("{href}",b.uploadUrl):i="submit";break;case"browse":if(!b.showBrowse)return"";c=b._getLayoutTemplate("btnBrowse");break;default:return""}return d+="browse"===a?" 
btn-file":" fileinput-"+a+" fileinput-"+a+"-button",da(g)||(g=' <span class="'+b.buttonLabelClass+'">'+g+"</span>"),c.replace("{type}",i).replace("{css}",d).replace("{title}",e).replace("{status}",h).replace("{icon}",f).replace("{label}",g)},_renderThumbProgress:function(){return'<div class="file-thumb-progress hide">'+this.progressTemplate.replace(/\{percent}/g,"0")+"</div>"},_renderFileFooter:function(a,b,c,d){var k,e=this,f=e.fileActionSettings,g=f.showRemove,h=f.showDrag,i=f.showUpload,j=f.showZoom,l=e._getLayoutTemplate("footer"),m=d?f.indicatorError:f.indicatorNew,n=d?f.indicatorErrorTitle:f.indicatorNewTitle;return b=e._getSize(b),k=e.isUploadable?l.replace(/\{actions}/g,e._renderFileActions(i,g,j,h,!1,!1,!1)).replace(/\{caption}/g,a).replace(/\{size}/g,b).replace(/\{width}/g,c).replace(/\{progress}/g,e._renderThumbProgress()).replace(/\{indicator}/g,m).replace(/\{indicatorTitle}/g,n):l.replace(/\{actions}/g,e._renderFileActions(!1,!1,j,h,!1,!1,!1)).replace(/\{caption}/g,a).replace(/\{size}/g,b).replace(/\{width}/g,c).replace(/\{progress}/g,"").replace(/\{indicator}/g,m).replace(/\{indicatorTitle}/g,n),k=ia(k,e.previewThumbTags)},_renderFileActions:function(a,b,c,d,e,f,g,h){if(!(a||b||c||d))return"";var p,i=this,j=f===!1?"":' data-url="'+f+'"',k=g===!1?"":' data-key="'+g+'"',l="",m="",n="",o="",q=i._getLayoutTemplate("actions"),r=i.fileActionSettings,s=i.otherActionButtons.replace(/\{dataKey}/g,k),t=e?r.removeClass+" disabled":r.removeClass;return 
b&&(l=i._getLayoutTemplate("actionDelete").replace(/\{removeClass}/g,t).replace(/\{removeIcon}/g,r.removeIcon).replace(/\{removeTitle}/g,r.removeTitle).replace(/\{dataUrl}/g,j).replace(/\{dataKey}/g,k)),a&&(m=i._getLayoutTemplate("actionUpload").replace(/\{uploadClass}/g,r.uploadClass).replace(/\{uploadIcon}/g,r.uploadIcon).replace(/\{uploadTitle}/g,r.uploadTitle)),c&&(n=i._getLayoutTemplate("actionZoom").replace(/\{zoomClass}/g,r.zoomClass).replace(/\{zoomIcon}/g,r.zoomIcon).replace(/\{zoomTitle}/g,r.zoomTitle)),d&&h&&(p="drag-handle-init "+r.dragClass,o=i._getLayoutTemplate("actionDrag").replace(/\{dragClass}/g,p).replace(/\{dragTitle}/g,r.dragTitle).replace(/\{dragIcon}/g,r.dragIcon)),q.replace(/\{delete}/g,l).replace(/\{upload}/g,m).replace(/\{zoom}/g,n).replace(/\{drag}/g,o).replace(/\{other}/g,s)},_browse:function(a){var b=this;b._raise("filebrowse"),a&&a.isDefaultPrevented()||(b.isError&&!b.isUploadable&&b.clear(),b.$captionContainer.focus())},_change:function(b){var c=this,d=c.$element;if(!c.isUploadable&&da(d.val())&&c.fileInputCleared)return void(c.fileInputCleared=!1);c.fileInputCleared=!1;var e,f,g,l,m,n,h=arguments.length>1,i=c.isUploadable,j=0,o=h?b.originalEvent.dataTransfer.files:d.get(0).files,p=c.filestack.length,q=da(d.attr("multiple")),r=q&&p>0,s=0,t=function(b,d,e,f){var g=a.extend(!0,{},c._getOutData({},{},o),{id:e,index:f}),h={id:e,index:f,file:d,files:o};return c.isUploadable?c._showUploadError(b,g):c._showError(b,h)};if(c.reader=null,c._resetUpload(),c._hideFileIcon(),c.isUploadable&&c.$container.find(".file-drop-zone ."+c.dropZoneTitleClass).remove(),h)for(e=[];o[j];)l=o[j],l.type||l.size%4096!==0?e.push(l):s++,j++;else e=void 0===b.target.files?b.target&&b.target.value?[{name:b.target.value.replace(/^.+\\/,"")}]:[]:b.target.files;if(da(e)||0===e.length)return i||c.clear(),c._showFolderError(s),void 
c._raise("fileselectnone");if(c._resetErrors(),n=e.length,g=c._getFileCount(c.isUploadable?c.getFileStack().length+n:n),c.maxFileCount>0&&g>c.maxFileCount){if(!c.autoReplace||n>c.maxFileCount)return m=c.autoReplace&&n>c.maxFileCount?n:g,f=c.msgFilesTooMany.replace("{m}",c.maxFileCount).replace("{n}",m),c.isError=t(f,null,null,null),c.$captionContainer.find(".kv-caption-icon").hide(),c._setCaption("",!0),void c.$container.removeClass("file-input-new file-input-ajax-new");g>c.maxFileCount&&c._resetPreviewThumbs(i)}else!i||r?(c._resetPreviewThumbs(!1),r&&c.clearStack()):!i||0!==p||k.count(c.id)&&!c.overwriteInitial||c._resetPreviewThumbs(!0);c.isPreviewable?c._readFiles(e):c._updateFileDetails(1),c._showFolderError(s)},_abort:function(b){var d,c=this;return!(!c.ajaxAborted||"object"!=typeof c.ajaxAborted||void 0===c.ajaxAborted.message)&&(d=a.extend(!0,{},c._getOutData(),b),d.abortData=c.ajaxAborted.data||{},d.abortMessage=c.ajaxAborted.message,c.cancel(),c._setProgress(101,c.$progress,c.msgCancelled),c._showUploadError(c.ajaxAborted.message,d,"filecustomerror"),!0)},_resetFileStack:function(){var b=this,c=0,d=[],e=[];b._getThumbs().each(function(){var f=a(this),g=f.attr("data-fileindex"),h=b.filestack[g];g!==-1&&(void 0!==h?(d[c]=h,e[c]=b._getFileName(h),f.attr({id:b.previewInitId+"-"+c,"data-fileindex":c}),c++):f.attr({id:"uploaded-"+ga(),"data-fileindex":"-1"}))}),b.filestack=d,b.filenames=e},clearStack:function(){var a=this;return a.filestack=[],a.filenames=[],a.$element},updateStack:function(a,b){var c=this;return c.filestack[a]=b,c.filenames[a]=c._getFileName(b),c.$element},addToStack:function(a){var b=this;return b.filestack.push(a),b.filenames.push(b._getFileName(a)),b.$element},getFileStack:function(a){var b=this;return b.filestack.filter(function(b){return a?void 0!==b:void 0!==b&&null!==b})},getFilesCount:function(){var a=this,b=a.isUploadable?a.getFileStack().length:a.$element.get(0).files.length;return a._getFileCount(b)},lock:function(){var a=this;return 
a._resetErrors(),a.disable(),a.showRemove&&p(a.$container.find(".fileinput-remove"),"hide"),a.showCancel&&a.$container.find(".fileinput-cancel").removeClass("hide"),a._raise("filelock",[a.filestack,a._getExtraData()]),a.$element},unlock:function(a){var b=this;return void 0===a&&(a=!0),b.enable(),b.showCancel&&p(b.$container.find(".fileinput-cancel"),"hide"),b.showRemove&&b.$container.find(".fileinput-remove").removeClass("hide"),a&&b._resetFileStack(),b._raise("fileunlock",[b.filestack,b._getExtraData()]),b.$element},cancel:function(){var e,b=this,c=b.ajaxRequests,d=c.length;if(d>0)for(e=0;e<d;e+=1)b.cancelling=!0,c[e].abort();return b._setProgressCancelled(),b._getThumbs().each(function(){var c=a(this),d=c.attr("data-fileindex");c.removeClass("file-uploading"),void 0!==b.filestack[d]&&(c.find(".kv-file-upload").removeClass("disabled").removeAttr("disabled"),c.find(".kv-file-remove").removeClass("disabled").removeAttr("disabled")),b.unlock()}),b.$element},clear:function(){var c,b=this;return b.$btnUpload.removeAttr("disabled"),b._getThumbs().find("video,audio,img").each(function(){ja(a(this))}),b._resetUpload(),b.clearStack(),b._clearFileInput(),b._resetErrors(!0),b._raise("fileclear"),b._hasInitialPreview()?(b._showFileIcon(),b._resetPreview(),b._initPreviewActions(),b.$container.removeClass("file-input-new")):(b._getThumbs().each(function(){b._clearObjects(a(this))}),b.isUploadable&&(k.data[b.id]={}),b.$preview.html(""),c=!b.overwriteInitial&&b.initialCaption.length>0?b.initialCaption:"",b.$caption.html(c),b.$caption.attr("title",""),p(b.$container,"file-input-new"),b._validateDefaultPreview()),0===b.$container.find(".file-preview-frame").length&&(b._initCaption()||b.$captionContainer.find(".kv-caption-icon").hide()),b._hideFileIcon(),b._raise("filecleared"),b.$captionContainer.focus(),b._setFileDropZoneTitle(),b.$element},reset:function(){var a=this;return 
a._resetPreview(),a.$container.find(".fileinput-filename").text(""),a._raise("filereset"),p(a.$container,"file-input-new"),(a.$preview.find(".file-preview-frame").length||a.isUploadable&&a.dropZoneEnabled)&&a.$container.removeClass("file-input-new"),
a._setFileDropZoneTitle(),a.clearStack(),a.formdata={},a.$element},disable:function(){var a=this;return a.isDisabled=!0,a._raise("filedisabled"),a.$element.attr("disabled","disabled"),a.$container.find(".kv-fileinput-caption").addClass("file-caption-disabled"),a.$container.find(".btn-file, .fileinput-remove, .fileinput-upload, .file-preview-frame button").attr("disabled",!0),a._initDragDrop(),a.$element},enable:function(){var a=this;return a.isDisabled=!1,a._raise("fileenabled"),a.$element.removeAttr("disabled"),a.$container.find(".kv-fileinput-caption").removeClass("file-caption-disabled"),a.$container.find(".btn-file, .fileinput-remove, .fileinput-upload, .file-preview-frame button").removeAttr("disabled"),a._initDragDrop(),a.$element},upload:function(){var e,f,g,b=this,c=b.getFileStack().length,d={},h=!a.isEmptyObject(b._getExtraData());if(b.minFileCount>0&&b._getFileCount(c)<b.minFileCount)return void b._noFilesError(d);if(b.isUploadable&&!b.isDisabled&&(0!==c||h)){if(b._resetUpload(),b.$progress.removeClass("hide"),b.uploadCount=0,b.uploadStatus={},b.uploadLog=[],b.lock(),b._setProgress(2),0===c&&h)return void b._uploadExtraOnly();if(g=b.filestack.length,b.hasInitData=!1,!b.uploadAsync)return b._uploadBatch(),b.$element;for(f=b._getOutData(),b._raise("filebatchpreupload",[f]),b.fileBatchCompleted=!1,b.uploadCache={content:[],config:[],tags:[],append:!0},b.uploadAsyncCount=b.getFileStack().length,e=0;e<g;e++)b.uploadCache.content[e]=null,b.uploadCache.config[e]=null,b.uploadCache.tags[e]=null;for(e=0;e<g;e++)void 0!==b.filestack[e]&&b._uploadSingle(e,b.filestack,!0)}},destroy:function(){var a=this,c=a.$container;return c.find(".file-drop-zone").off(),a.$element.insertBefore(c).off(b).removeData(),c.off().remove(),a.$element},refresh:function(b){var c=this,d=c.$element;return b=b?a.extend(!0,{},c.options,b):c.options,c.destroy(),d.fileinput(b),d.val()&&d.trigger("change.fileinput"),d}},a.fn.fileinput=function(b){if(m()||i(9)){var 
c=Array.apply(null,arguments),d=[];switch(c.shift(),this.each(function(){var l,e=a(this),f=e.data("fileinput"),g="object"==typeof b&&b,h=g.theme||e.data("theme"),i={},j={},k=g.language||e.data("language")||"en";f||(h&&(j=a.fn.fileinputThemes[h]||{}),"en"===k||da(a.fn.fileinputLocales[k])||(i=a.fn.fileinputLocales[k]||{}),l=a.extend(!0,{},a.fn.fileinput.defaults,j,a.fn.fileinputLocales.en,i,g,e.data()),f=new oa(this,l),e.data("fileinput",f)),"string"==typeof b&&d.push(f[b].apply(f,c))}),d.length){case 0:return this;case 1:return d[0];default:return d}}},a.fn.fileinput.defaults={language:"en",showCaption:!0,showBrowse:!0,showPreview:!0,showRemove:!0,showUpload:!0,showCancel:!0,showClose:!0,showUploadedThumbs:!0,browseOnZoneClick:!1,autoReplace:!1,previewClass:"",captionClass:"",mainClass:"file-caption-main",mainTemplate:null,purifyHtml:!0,fileSizeGetter:null,initialCaption:"",initialPreview:[],initialPreviewDelimiter:"*$$*",initialPreviewAsData:!1,initialPreviewFileType:"image",initialPreviewConfig:[],initialPreviewThumbTags:[],previewThumbTags:{},initialPreviewShowDelete:!0,removeFromPreviewOnError:!1,deleteUrl:"",deleteExtraData:{},overwriteInitial:!0,layoutTemplates:Y,previewTemplates:Z,previewZoomSettings:$,previewZoomButtonIcons:{prev:'<i class="glyphicon glyphicon-triangle-left"></i>',next:'<i class="glyphicon glyphicon-triangle-right"></i>',toggleheader:'<i class="glyphicon glyphicon-resize-vertical"></i>',fullscreen:'<i class="glyphicon glyphicon-fullscreen"></i>',borderless:'<i class="glyphicon glyphicon-resize-full"></i>',close:'<i class="glyphicon glyphicon-remove"></i>'},previewZoomButtonClasses:{prev:"btn btn-navigate",next:"btn btn-navigate",toggleheader:"btn btn-default btn-header-toggle",fullscreen:"btn btn-default",borderless:"btn btn-default",close:"btn 
btn-default"},allowedPreviewTypes:_,allowedPreviewMimeTypes:null,allowedFileTypes:null,allowedFileExtensions:null,defaultPreviewContent:null,customLayoutTags:{},customPreviewTags:{},previewSettings:ba,fileTypeSettings:ca,previewFileIcon:'<i class="glyphicon glyphicon-file"></i>',previewFileIconClass:"file-other-icon",previewFileIconSettings:{},previewFileExtSettings:{},buttonLabelClass:"hidden-xs",browseIcon:'<i class="glyphicon glyphicon-folder-open"></i> ',browseClass:"btn btn-primary",removeIcon:'<i class="glyphicon glyphicon-trash"></i>',removeClass:"btn btn-default",cancelIcon:'<i class="glyphicon glyphicon-ban-circle"></i>',cancelClass:"btn btn-default",uploadIcon:'<i class="glyphicon glyphicon-upload"></i>',uploadClass:"btn btn-default",uploadUrl:null,uploadAsync:!0,uploadExtraData:{},zoomModalHeight:480,minImageWidth:null,minImageHeight:null,maxImageWidth:null,maxImageHeight:null,resizeImage:!1,resizePreference:"width",resizeQuality:.92,resizeDefaultImageType:"image/jpeg",maxFileSize:0,maxFilePreviewSize:25600,minFileCount:0,maxFileCount:0,validateInitialCount:!1,msgValidationErrorClass:"text-danger",msgValidationErrorIcon:'<i class="glyphicon glyphicon-exclamation-sign"></i> ',msgErrorClass:"file-error-message",progressThumbClass:"progress-bar progress-bar-success progress-bar-striped active",progressClass:"progress-bar progress-bar-success progress-bar-striped active",progressCompleteClass:"progress-bar progress-bar-success",progressErrorClass:"progress-bar progress-bar-danger",progressUploadThreshold:99,previewFileType:"image",elCaptionContainer:null,elCaptionText:null,elPreviewContainer:null,elPreviewImage:null,elPreviewStatus:null,elErrorContainer:null,errorCloseButton:'<span class="close 
kv-error-close">×</span>',slugCallback:null,dropZoneEnabled:!0,dropZoneTitleClass:"file-drop-zone-title",fileActionSettings:{},otherActionButtons:"",textEncoding:"UTF-8",ajaxSettings:{},ajaxDeleteSettings:{},showAjaxErrorDetails:!0},a.fn.fileinputLocales.en={fileSingle:"file",filePlural:"files",browseLabel:"Browse …",removeLabel:"Remove",removeTitle:"Clear selected files",cancelLabel:"Cancel",cancelTitle:"Abort ongoing upload",uploadLabel:"Upload",uploadTitle:"Upload selected files",msgNo:"No",msgNoFilesSelected:"No files selected",msgCancelled:"Cancelled",msgZoomModalHeading:"Detailed Preview",msgSizeTooLarge:'File "{name}" (<b>{size} KB</b>) exceeds maximum allowed upload size of <b>{maxSize} KB</b>.',msgFilesTooLess:"You must select at least <b>{n}</b> {files} to upload.",msgFilesTooMany:"Number of files selected for upload <b>({n})</b> exceeds maximum allowed limit of <b>{m}</b>.",msgFileNotFound:'File "{name}" not found!',msgFileSecured:'Security restrictions prevent reading the file "{name}".',msgFileNotReadable:'File "{name}" is not readable.',msgFilePreviewAborted:'File preview aborted for "{name}".',msgFilePreviewError:'An error occurred while reading the file "{name}".',msgInvalidFileType:'Invalid type for file "{name}". Only "{types}" files are supported.',msgInvalidFileExtension:'Invalid extension for file "{name}". Only "{extensions}" files are supported.',msgUploadAborted:"The file upload was aborted",msgUploadThreshold:"Processing...",msgValidationError:"Validation Error",msgLoading:"Loading file {index} of {files} …",msgProgress:"Loading file {index} of {files} - {name} - {percent}% completed.",msgSelected:"{n} {files} selected",msgFoldersNotAllowed:"Drag & drop files only! 
{n} folder(s) dropped were skipped.",msgImageWidthSmall:'Width of image file "{name}" must be at least {size} px.',msgImageHeightSmall:'Height of image file "{name}" must be at least {size} px.',msgImageWidthLarge:'Width of image file "{name}" cannot exceed {size} px.',msgImageHeightLarge:'Height of image file "{name}" cannot exceed {size} px.',msgImageResizeError:"Could not get the image dimensions to resize.",msgImageResizeException:"Error while resizing the image.<pre>{errors}</pre>",dropZoneTitle:"Drag & drop files here …",dropZoneClickTitle:"<br>(or click to select {files})",previewZoomButtonTitles:{prev:"View previous file",next:"View next file",toggleheader:"Toggle header",fullscreen:"Toggle full screen",borderless:"Toggle borderless mode",close:"Close detailed preview"}},a.fn.fileinput.Constructor=oa,a(document).ready(function(){var b=a("input.file[type=file]");b.length&&b.fileinput()})});
|
#!/usr/bin/env python
#################################################################
# Python Script to retrieve 164 online Data files of 'ds131.2',
# total 3.02G. This script uses 'requests' to download data.
#
# Highlight this script by Select All, Copy and Paste it into a file;
# make the file executable and run it on command line.
#
# You need pass in your password as a parameter to execute
# this script; or you can set an environment variable RDAPSWD
# if your Operating System supports it.
#
# Contact rpconroy@ucar.edu (Riley Conroy) for further assistance.
#################################################################
import sys, os
import requests
def check_file_status(filepath, filesize):
    """Print an in-place progress line for a download in progress.

    Parameters
    ----------
    filepath : str
        Path of the partially downloaded file on disk.
    filesize : int
        Expected total size in bytes (from the Content-Length header).
    """
    sys.stdout.write('\r')
    sys.stdout.flush()
    size = int(os.stat(filepath).st_size)
    # Guard against a zero/unknown Content-Length, which previously raised
    # ZeroDivisionError here.
    percent_complete = (size / filesize) * 100 if filesize else 0.0
    sys.stdout.write('%.3f %s' % (percent_complete, '% Completed'))
    sys.stdout.flush()
# Obtain the RDA password: prompt interactively when it was not supplied as
# a command-line argument or via the RDAPSWD environment variable.
if len(sys.argv) < 2 and 'RDAPSWD' not in os.environ:
    try:
        # Prefer a hidden prompt so the password is not echoed to the terminal.
        import getpass
        pswd = getpass.getpass('Password: ')
    except ImportError:
        # getpass unavailable: fall back to a visible prompt.
        pswd = input('Password: ')
else:
    try:
        pswd = sys.argv[1]
    except IndexError:
        # No CLI argument, so RDAPSWD must be set (guaranteed by the check above).
        pswd = os.environ['RDAPSWD']
url = 'https://rda.ucar.edu/cgi-bin/login'
values = {'email': '1811017@tongji.edu.cn', 'passwd': pswd, 'action': 'login'}
# Authenticate against the RDA login endpoint; the session cookies returned
# here are reused for every subsequent data request.
ret = requests.post(url, data=values)
if ret.status_code != 200:
    print('Bad Authentication')
    print(ret.text)
    # sys.exit is the reliable way to abort a script; the `exit` builtin is
    # only guaranteed in interactive sessions.
    sys.exit(1)
dspath = 'https://rda.ucar.edu/data/ds131.2/'
# One yearly-mean CWAT (atmospheric-column water) GRIB file per year,
# covering 1851 through 2014 inclusive (164 files in total).
filelist = [
    'pgrbanl/pgrbanl_mean_%d_CWAT_atmos-col.grib' % year
    for year in range(1851, 2015)
]
# Download each file, streaming in 1 MiB chunks and reporting progress.
for remote_name in filelist:
    file_url = dspath + remote_name
    file_base = '../meta-data/cwat/' + os.path.basename(remote_name)
    print('Downloading', file_base)
    req = requests.get(file_url, cookies=ret.cookies, allow_redirects=True, stream=True)
    # Fail fast on HTTP errors instead of saving an error page as data.
    req.raise_for_status()
    # Content-length may be absent; default to 0 so progress is simply skipped.
    filesize = int(req.headers.get('Content-length', 0))
    chunk_size = 1048576  # 1 MiB
    with open(file_base, 'wb') as outfile:
        for chunk in req.iter_content(chunk_size=chunk_size):
            outfile.write(chunk)
            # Only show incremental progress for multi-chunk downloads.
            if chunk_size < filesize:
                check_file_status(file_base, filesize)
    check_file_status(file_base, filesize)
    print()
|
from typing import List
class KeypadInstructionsInterpreter:
    """Parse keypad movement instructions (one line of U/D/L/R moves per
    digit) for the classic 3x3 keypad puzzle."""

    # Immutable keypad layout, safe to share across instances.
    KEYPAD = [
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9]
    ]

    def __init__(self, instructions_string: str) -> None:
        # Per-instance state. These were previously class-level lists, so
        # every instance shared (and kept appending to) the same objects.
        self.instructions: List[List[str]] = []
        self.keypad_combination: List[int] = []
        self.instructions_string = instructions_string
        self.parse_instructions(instructions_string)

    def parse_instructions(self, instructions_string: str) -> None:
        """Split the raw input into one list of single-move characters per line."""
        for direction in instructions_string.split("\n"):
            self.instructions.append(list(direction))

    def interpret_instruction(self, instruction: str) -> None:
        # Not yet implemented.
        return

    def get_keypad_combination(self) -> None:
        # Not yet implemented.
        return
if __name__ == "__main__":
    # (The original line contained a duplicated "test_input =" assignment.)
    test_input = "ULL\nRRDDD\nLURDL\nUUUUD"
    kii = KeypadInstructionsInterpreter(test_input)
    print(kii.instructions)
|
# Copyright (c) 2020 Oxford-Hainan Blockchain Research Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ccf
import argparse
import subprocess
import agent
import time
import os
import json
import signal
import select
import traceback
import utils
import web3
from multiprocessing import Process
from ccf.clients import CCFClient, Identity
def get_args():
    """Build and parse the cloak-manager command line, returning the namespace."""
    parser = argparse.ArgumentParser(description='cloak manager')
    subparsers = parser.add_subparsers(title="command", dest="command")
    setup = subparsers.add_parser("setup")
    setup.add_argument('--build-path', help='cloak-tee build path', required=True)
    setup.add_argument('--cloak-tee-port', type=int, help='cloak tee port', default=8000)
    setup.add_argument('--blockchain-http-uri', help='blockchain http uri', default="http://127.0.0.1:8545")
    setup.add_argument('--cloak-service-address', help='deployed cloak service address', default=None)
    setup.add_argument('--cloak-service-path', help='cloak service path', default=None)
    setup.add_argument('--manager-address', help='manager cloak service address', default=None)
    return parser.parse_args()
class Cloak:
    """Drives cloak-tee setup: launches the cloak-tee sandbox and its log
    agent, then registers the cloak service contract with cloak-tee."""

    def __init__(self, args):
        self.args = args
        self.cloak_service_addr = getattr(args, 'cloak_service_address', None)

    def run(self):
        """Dispatch on the parsed sub-command."""
        if self.args.command == "setup":
            # self.deploy_sol_contracts()
            self.setup_cloak_service()

    def setup_cloak_service(self):
        """Start cloak-tee and its agent, prepare the service, then wait.

        On any failure the sandbox process group and the agent process are
        torn down before returning.
        """
        # Pre-bind both handles so the except-branch cannot raise NameError
        # when run_cloak_tee() itself fails before assignment.
        cloak_tee_proc = None
        agent_proc = None
        try:
            cloak_tee_proc = self.run_cloak_tee()
            agent_proc = self.run_cloak_tee_agent()
            self.prepare_cloak_tee()
            cloak_tee_proc.wait()
            agent_proc.join()
        except Exception as e:
            traceback.print_exc()
            print(f"err:{e}")
            if cloak_tee_proc:
                # sandbox.sh spawns children; terminate the whole process group.
                os.killpg(os.getpgid(cloak_tee_proc.pid), signal.SIGTERM)
            if agent_proc:
                agent_proc.kill()

    def run_cloak_tee(self):
        """Launch the cloak-tee sandbox and block until its ready banner appears."""
        print("start cloak-tee")
        process = subprocess.Popen(
            "/opt/ccf-0.15.2/bin/sandbox.sh -p libevm4ccf.virtual.so".split(),
            cwd=self.args.build_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        s = select.poll()
        s.register(process.stdout)
        while True:
            time.sleep(0.1)
            # readline blocks until the sandbox emits its next log line.
            line = process.stdout.readline().decode()
            print(line[:-1])
            if line.find("Press Ctrl+C to shutdown the network") != -1:
                print("cloak-tee started")
                break
        return process

    def run_cloak_tee_agent(self):
        """Start the log-forwarding agent in a separate process and return it."""
        p = Process(target=agent.loop_for_log, args=(self.args,))
        p.start()
        time.sleep(1)
        print("cloak-tee-agent started")
        return p

    def prepare_cloak_tee(self):
        """Upload the CloakService contract bytecode to cloak-tee."""
        ccf_client = utils.get_ccf_client(self.args)
        # with-statement guarantees the handle is closed even if json
        # parsing fails (previously an unconditional open/read/close).
        with open(self.args.cloak_service_path + '/CloakService.json', 'r') as f:
            contract = json.loads(f.read())
        ccf_client.call("/app/cloak_prepare", {
            "manager": self.args.manager_address,
            "cloakServiceContract": contract['bytecode']
        })
        print("cloak-prepare DONE")
if __name__ == "__main__":
    # Parse the CLI, echo it for the operator, then run the manager.
    cli_args = get_args()
    print(cli_args)
    Cloak(cli_args).run()
|
//
// Created by Bradley Austin Davis on 2018/01/09
// Copyright 2013-2018 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_RenderCommonTask_h
#define hifi_RenderCommonTask_h
#include <gpu/Pipeline.h>
#include <render/RenderFetchCullSortTask.h>
#include "LightingModel.h"
// Opens a GPU timing range and emits the shared gpu::RangeTimer handle so a
// paired EndGPURangeTimer job can close the range later in the frame.
class BeginGPURangeTimer {
public:
    // Output-only job model: produces the RangeTimer for downstream jobs.
    using JobModel = render::Job::ModelO<BeginGPURangeTimer, gpu::RangeTimerPointer>;
    BeginGPURangeTimer(const std::string& name) : _gpuTimer(std::make_shared<gpu::RangeTimer>(name)) {}
    void run(const render::RenderContextPointer& renderContext, gpu::RangeTimerPointer& timer);
protected:
    // Timer is created once at construction and re-used every frame.
    gpu::RangeTimerPointer _gpuTimer;
};
using GPURangeTimerConfig = render::GPUJobConfig;
class EndGPURangeTimer {
public:
using Config = GPURangeTimerConfig;
using JobModel = render::Job::ModelI<EndGPURangeTimer, gpu::RangeTimerPointer, Config>;
EndGPURangeTimer() {}
void configure(const Config& config) {}
void run(const render::RenderContextPointer& renderContext, const gpu::RangeTimerPointer& timer);
protected:
};
// QObject configuration for DrawOverlay3D: exposes how many overlay items
// were drawn last frame (read-only) and a cap on how many may be drawn.
class DrawOverlay3DConfig : public render::Job::Config {
    Q_OBJECT
    Q_PROPERTY(int numDrawn READ getNumDrawn NOTIFY numDrawnChanged)
    Q_PROPERTY(int maxDrawn MEMBER maxDrawn NOTIFY dirty)
public:
    int getNumDrawn() { return numDrawn; }
    void setNumDrawn(int num) { numDrawn = num; emit numDrawnChanged(); }
    // -1 disables the cap (see DrawOverlay3D::configure).
    int maxDrawn{ -1 };
signals:
    void numDrawnChanged();
    void dirty();
protected:
    // Updated by the job each frame via setNumDrawn().
    int numDrawn{ 0 };
};
// Draws the in-world overlay items, in either the opaque or the transparent
// pass (chosen at construction), limited to the configured maxDrawn count.
class DrawOverlay3D {
public:
    // Inputs: items to draw, the lighting model, and a vec2 parameter
    // (NOTE(review): meaning of the vec2 is not visible here — confirm in the .cpp).
    using Inputs = render::VaryingSet3<render::ItemBounds, LightingModelPointer, glm::vec2>;
    using Config = DrawOverlay3DConfig;
    using JobModel = render::Job::ModelI<DrawOverlay3D, Inputs, Config>;
    DrawOverlay3D(bool opaque);
    void configure(const Config& config) { _maxDrawn = config.maxDrawn; }
    void run(const render::RenderContextPointer& renderContext, const Inputs& inputs);
protected:
    render::ShapePlumberPointer _shapePlumber;
    int _maxDrawn; // initialized by Config
    // false when this instance was constructed for the transparent pass.
    bool _opaquePass { true };
};
// Job that composites the HUD layer; implementation lives in the .cpp.
class CompositeHUD {
public:
    using JobModel = render::Job::Model<CompositeHUD>;
    CompositeHUD() {}
    void run(const render::RenderContextPointer& renderContext);
};
// Job that blits the given source framebuffer; implementation in the .cpp.
class Blit {
public:
    // Input-only job model: consumes the framebuffer to blit from.
    using JobModel = render::Job::ModelI<Blit, gpu::FramebufferPointer>;
    void run(const render::RenderContextPointer& renderContext, const gpu::FramebufferPointer& srcFramebuffer);
};
// Extracts the view frustum and each shadow-cascade frustum from the render
// context into a fixed-size output array, indexed by the Frustum enum.
class ExtractFrustums {
public:
    enum Frustum {
        SHADOW_CASCADE0_FRUSTUM = 0,
        SHADOW_CASCADE1_FRUSTUM,
        SHADOW_CASCADE2_FRUSTUM,
        SHADOW_CASCADE3_FRUSTUM,
        SHADOW_CASCADE_FRUSTUM_COUNT,
        // The view frustum occupies the slot right after the shadow cascades.
        VIEW_FRUSTUM = SHADOW_CASCADE_FRUSTUM_COUNT,
        FRUSTUM_COUNT
    };
    using Output = render::VaryingArray<ViewFrustumPointer, FRUSTUM_COUNT>;
    // Output-only job model: produces the frustum array.
    using JobModel = render::Job::ModelO<ExtractFrustums, Output>;
    void run(const render::RenderContextPointer& renderContext, Output& output);
};
#endif // hifi_RenderCommonTask_h
|
/*!
* Copyright 2015 by Contributors
* \file simple_dmatrix.h
* \brief In-memory version of DMatrix.
* \author Tianqi Chen
*/
#ifndef XGBOOST_DATA_SIMPLE_DMATRIX_H_
#define XGBOOST_DATA_SIMPLE_DMATRIX_H_
#include <xgboost/base.h>
#include <xgboost/data.h>
#include <algorithm>
#include <memory>
#include <limits>
#include <utility>
#include <vector>
#include "simple_csr_source.h"
#include "../common/group_data.h"
#include "../common/math.h"
#include "adapter.h"
namespace xgboost {
namespace data {
// Used for single batch data.
// In-memory DMatrix backed by a single SparsePage source. Alternate layouts
// (CSC, sorted CSC, Ellpack) are held in the unique_ptr members below.
class SimpleDMatrix : public DMatrix {
 public:
  // Construct from an already-populated in-memory source; takes ownership.
  explicit SimpleDMatrix(std::unique_ptr<DataSource<SparsePage>>&& source)
      : source_(std::move(source)) {}
  // Construct from an external-data adapter, dropping entries equal to
  // `missing`; `nthread` bounds parallelism (defined out of line).
  template <typename AdapterT>
  explicit SimpleDMatrix(AdapterT* adapter, float missing, int nthread);
  MetaInfo& Info() override;
  const MetaInfo& Info() const override;
  // Density of column `cidx` (NOTE(review): presumably the non-missing
  // fraction — confirm against the implementation).
  float GetColDensity(size_t cidx) override;
  bool SingleColBlock() const override;
 private:
  BatchSet<SparsePage> GetRowBatches() override;
  BatchSet<CSCPage> GetColumnBatches() override;
  BatchSet<SortedCSCPage> GetSortedColumnBatches() override;
  BatchSet<EllpackPage> GetEllpackBatches(const BatchParam& param) override;
  // source data pointer.
  std::unique_ptr<DataSource<SparsePage>> source_;
  // Column-oriented views of the data.
  std::unique_ptr<CSCPage> column_page_;
  std::unique_ptr<SortedCSCPage> sorted_column_page_;
  std::unique_ptr<EllpackPage> ellpack_page_;
};
} // namespace data
} // namespace xgboost
#endif // XGBOOST_DATA_SIMPLE_DMATRIX_H_
|
module.exports = ({ actions }) => {
actions.createTypes(`
type Article implements Node {
id: ID!
slug: String!
title: String!
date: Date! @dateformat
author: String!
excerpt(pruneLength: Int = 140): String!
body: String!
hero: File @fileByRelativePath
thumbnail: File @fileByRelativePath
video: String
timeToRead: Int
}
`);
};
|
__author__ = "aleaf"
import sys
import os
import numpy as np
import warnings
import copy
from numpy.lib import recfunctions
from ..pakbase import Package
from ..utils import MfList
from ..utils.flopy_io import line_parse
from ..utils.recarray_utils import create_empty_recarray
from ..utils.optionblock import OptionBlock
from collections import OrderedDict
# pandas is an optional dependency; fall back to a False sentinel so callers
# can gate on `if pd:` before using it. Catch only ImportError — the previous
# bare except would also have hidden unrelated startup errors inside pandas.
try:
    import pandas as pd
except ImportError:
    pd = False
try:
    from numpy.lib import NumpyVersion
except ImportError:
    # Very old numpy without NumpyVersion: assume the pre-1.14 formatter.
    numpy114 = False
else:
    numpy114 = NumpyVersion(np.__version__) >= "1.14.0"
# numpy >= 1.14 ships the Dragon4 floating-point repr, so plain str() already
# gives a shortest-round-trip form; older versions get an explicit
# 8-significant-digit format (single precision has ~7.2 decimal digits).
default_float_format = "{!s}" if numpy114 else "{:.8g}"
class ModflowSfr2(Package):
"""
Streamflow-Routing (SFR2) Package Class
Parameters
----------
model : model object
The model object (of type :class:'flopy.modflow.mf.Modflow') to which
this package will be added.
nstrm : integer
An integer value that can be specified to be positive or negative. The
absolute value of NSTRM is equal to the number of stream reaches
(finite-difference cells) that are active during the simulation and
the number of lines of data to be included in Item 2, described below.
When NSTRM is specified to be a negative integer, it is also used as a
flag for changing the format of the data input, for simulating
unsaturated flow beneath streams, and (or) for simulating transient
streamflow routing (for MODFLOW-2005 simulations only), depending
on the values specified for variables ISFROPT and IRTFLG, as described
below. When NSTRM is negative, NSFRPAR must be set to zero, which means
that parameters cannot be specified. By default, nstrm is set to
negative.
nss : integer
An integer value equal to the number of stream segments (consisting of
one or more reaches) that are used to define the complete stream
network. The value of NSS represents the number of segments that must
be defined through a combination of parameters and variables in Item 4
or variables in Item 6.
nparseg : integer
An integer value equal to (or exceeding) the number of stream-segment
definitions associated with all parameters. This number can be more
than the total number of segments (NSS) in the stream network because
the same segment can be defined in multiple parameters, and because
parameters can be time-varying. NPARSEG must equal or exceed the sum
of NLST x N for all parameters, where N is the greater of 1 and
NUMINST; that is, NPARSEG must equal or exceed the total number of
repetitions of item 4b. This variable must be zero when NSTRM is
negative.
const : float
A real value (or conversion factor) used in calculating stream depth
for stream reach. If stream depth is not calculated using Manning's
equation for any stream segment (that is, ICALC does not equal 1 or 2),
then a value of zero can be entered. If Manning's equation is used, a
constant of 1.486 is used for flow units of cubic feet per second, and
a constant of 1.0 is used for units of cubic meters per second. The
constant must be multiplied by 86,400 when using time units of days in
the simulation. An explanation of time units used in MODFLOW is given
by Harbaugh and others (2000, p. 10).
dleak : float
A real value equal to the tolerance level of stream depth used in
computing leakage between each stream reach and active model cell.
Value is in units of length. Usually a value of 0.0001 is sufficient
when units of feet or meters are used in model.
ipakcb : integer
An integer value used as a flag for writing stream-aquifer leakage
values. If ipakcb > 0, unformatted leakage between each stream reach
and corresponding model cell will be saved to the main cell-by-cell
budget file whenever when a cell-by-cell budget has been specified in
Output Control (see Harbaugh and others, 2000, pages 52-55). If
ipakcb = 0, leakage values will not be printed or saved. Printing to
the listing file (ipakcb < 0) is not supported.
istcb2 : integer
An integer value used as a flag for writing to a separate formatted
file all information on inflows and outflows from each reach; on
stream depth, width, and streambed conductance; and on head difference
and gradient across the streambed. If ISTCB2 > 0, then ISTCB2 also
represents the unit number to which all information for each stream
reach will be saved to a separate file when a cell-by-cell budget has
been specified in Output Control. If ISTCB2 < 0, it is the unit number
to which unformatted streamflow out of each reach will be saved to a
file whenever the cell-by-cell budget has been specified in Output
Control. Unformatted output will be saved to <model name>.sfq.
isfropt : integer
An integer value that defines the format of the input data and whether
or not unsaturated flow is simulated beneath streams. Values of ISFROPT
are defined as follows
0 No vertical unsaturated flow beneath streams. Streambed elevations,
stream slope, streambed thickness, and streambed hydraulic
conductivity are read for each stress period using variables
defined in Items 6b and 6c; the optional variables in Item 2 are
not used.
1 No vertical unsaturated flow beneath streams. Streambed elevation,
stream slope, streambed thickness, and streambed hydraulic
conductivity are read for each reach only once at the beginning of
the simulation using optional variables defined in Item 2; Items 6b
and 6c are used to define stream width and depth for ICALC = 0 and
stream width for ICALC = 1.
2 Streambed and unsaturated-zone properties are read for each reach
only once at the beginning of the simulation using optional
variables defined in Item 2; Items 6b and 6c are used to define
stream width and depth for ICALC = 0 and stream width for
ICALC = 1. When using the LPF Package, saturated vertical
hydraulic conductivity for the unsaturated zone is the same as
the vertical hydraulic conductivity of the corresponding layer in
LPF and input variable UHC is not read.
3 Same as 2 except saturated vertical hydraulic conductivity for the
unsaturated zone (input variable UHC) is read for each reach.
4 Streambed and unsaturated-zone properties are read for the
beginning and end of each stream segment using variables defined
in Items 6b and 6c; the optional variables in Item 2 are not used.
Streambed properties can vary each stress period. When using the
LPF Package, saturated vertical hydraulic conductivity for the
unsaturated zone is the same as the vertical hydraulic conductivity
of the corresponding layer in LPF and input variable UHC1 is not
read.
5 Same as 4 except saturated vertical hydraulic conductivity for the
unsaturated zone (input variable UHC1) is read for each segment at
the beginning of the first stress period only.
nstrail : integer
An integer value that is the number of trailing wave increments used to
represent a trailing wave. Trailing waves are used to represent a
decrease in the surface infiltration rate. The value can be increased
to improve mass balance in the unsaturated zone. Values between 10 and
20 work well and result in unsaturated-zone mass balance errors beneath
streams ranging between 0.001 and 0.01 percent. Please see Smith (1983)
for further details. (default is 10; for MODFLOW-2005 simulations only
when isfropt > 1)
isuzn : integer
An integer value that is the maximum number of vertical cells used to
define the unsaturated zone beneath a stream reach. If ICALC is 1 for
all segments then ISUZN should be set to 1. (default is 1; for
MODFLOW-2005 simulations only when isfropt > 1)
nsfrsets : integer
An integer value that is the maximum number of different sets of
trailing waves used to allocate arrays. Arrays are allocated by
multiplying NSTRAIL by NSFRSETS. A value of 30 is sufficient for
problems where the stream depth varies often. NSFRSETS does not affect
model run time. (default is 30; for MODFLOW-2005 simulations only
when isfropt > 1)
irtflg : integer
An integer value that indicates whether transient streamflow routing is
active. IRTFLG must be specified if NSTRM < 0. If IRTFLG > 0,
streamflow will be routed using the kinematic-wave equation (see USGS
Techniques and Methods 6-D1, p. 68-69); otherwise, IRTFLG should be
specified as 0. Transient streamflow routing is only available for
MODFLOW-2005; IRTFLG can be left blank for MODFLOW-2000 simulations.
(default is 1)
numtim : integer
An integer value equal to the number of sub time steps used to route
streamflow. The time step that will be used to route streamflow will
be equal to the MODFLOW time step divided by NUMTIM. (default is 2;
for MODFLOW-2005 simulations only when irtflg > 0)
weight : float
A real number equal to the time weighting factor used to calculate the
change in channel storage. WEIGHT has a value between 0.5 and 1. Please
refer to equation 83 in USGS Techniques and Methods 6-D1 for further
details. (default is 0.75; for MODFLOW-2005 simulations only when
irtflg > 0)
flwtol : float
A real number equal to the streamflow tolerance for convergence of the
kinematic wave equation used for transient streamflow routing. A value
of 0.00003 cubic meters per second has been used successfully in test
simulations (and would need to be converted to whatever units are being
used in the particular simulation). (default is 0.0001; for
MODFLOW-2005 simulations only when irtflg > 0)
reach_data : recarray
Numpy record array of length equal to nstrm, with columns for each
variable entered in item 2 (see SFR package input instructions). In
following flopy convention, layer, row, column and node number
(for unstructured grids) are zero-based; segment and reach are
one-based.
segment_data : recarray
Numpy record array of length equal to nss, with columns for each
variable entered in items 6a, 6b and 6c (see SFR package input
instructions). Segment numbers are one-based.
dataset_5 : dict of lists
Optional; will be built automatically from segment_data unless
specified. Dict of lists, with key for each stress period. Each list
contains the variables [itmp, irdflag, iptflag]. (see SFR documentation
for more details):
itmp : list of integers (len = NPER)
For each stress period, an integer value for reusing or reading stream
segment data that can change each stress period. If ITMP = 0 then all
stream segment data are defined by Item 4 (NSFRPAR > 0; number of
stream parameters is greater than 0). If ITMP > 0, then stream segment
data are not defined in Item 4 and must be defined in Item 6 below for
a number of segments equal to the value of ITMP. If ITMP < 0, then
stream segment data not defined in Item 4 will be reused from the last
stress period (Item 6 is not read for the current stress period). ITMP
must be defined >= 0 for the first stress period of a simulation.
irdflag : int or list of integers (len = NPER)
For each stress period, an integer value for printing input data
specified for this stress period. If IRDFLG = 0, input data for this
stress period will be printed. If IRDFLG > 0, then input data for this
stress period will not be printed.
iptflag : int or list of integers (len = NPER)
For each stress period, an integer value for printing streamflow-
routing results during this stress period. If IPTFLG = 0, or whenever
the variable ICBCFL or "Save Budget" is specified in Output Control,
the results for specified time steps during this stress period will be
printed. If IPTFLG > 0, then the results during this stress period will
not be printed.
extension : string
Filename extension (default is 'sfr')
unit_number : int
File unit number (default is None).
filenames : str or list of str
Filenames to use for the package and the output files. If
filenames=None the package name will be created using the model name
and package extension and the cbc output and sfr output name will be
created using the model name and .cbc the .sfr.bin/.sfr.out extensions
(for example, modflowtest.cbc, and modflowtest.sfr.bin), if ipakcbc and
istcb2 are numbers greater than zero. If a single string is passed the
package name will be set to the string and other uzf output files will
be set to the model name with the appropriate output file extensions.
To define the names for all package files (input and output) the
length of the list of strings should be 3. Default is None.
Attributes
----------
outlets : nested dictionary
Contains the outlet for each SFR segment; format is
{per: {segment: outlet}} This attribute is created by the
get_outlets() method.
outsegs : dictionary of arrays
Each array is of shape nss rows x maximum of nss columns. The first
column contains the SFR segments, the second column contains the
outsegs of those segments; the third column the outsegs of the outsegs,
and so on, until all outlets have been encountered, or nss is reached.
The latter case indicates circular routing. This attribute is created
by the get_outlets() method.
Methods
-------
See Also
--------
Notes
-----
Parameters are not supported in FloPy.
MODFLOW-OWHM is not supported.
The Ground-Water Transport (GWT) process is not supported.
Limitations on which features are supported...
Examples
--------
>>> import flopy
>>> ml = flopy.modflow.Modflow()
>>> sfr2 = flopy.modflow.ModflowSfr2(ml, ...)
"""
_options = OrderedDict(
[
("reachinput", OptionBlock.simple_flag),
("transroute", OptionBlock.simple_flag),
("tabfiles", OptionBlock.simple_tabfile),
(
"lossfactor",
{
OptionBlock.dtype: np.bool_,
OptionBlock.nested: True,
OptionBlock.n_nested: 1,
OptionBlock.vars: {"factor": OptionBlock.simple_float},
},
),
(
"strhc1kh",
{
OptionBlock.dtype: np.bool_,
OptionBlock.nested: True,
OptionBlock.n_nested: 1,
OptionBlock.vars: {"factorkh": OptionBlock.simple_float},
},
),
(
"strhc1kv",
{
OptionBlock.dtype: np.bool_,
OptionBlock.nested: True,
OptionBlock.n_nested: 1,
OptionBlock.vars: {"factorkv": OptionBlock.simple_float},
},
),
]
)
nsfrpar = 0
heading = (
"# Streamflow-Routing (SFR2) file for MODFLOW, generated by Flopy"
)
default_value = 0.0
# LENUNI = {"u": 0, "f": 1, "m": 2, "c": 3}
len_const = {1: 1.486, 2: 1.0, 3: 100.0}
# {"u": 0, "s": 1, "m": 2, "h": 3, "d": 4, "y": 5}
time_const = {1: 1.0, 2: 60.0, 3: 3600.0, 4: 86400.0, 5: 31557600.0}
def __init__(
self,
model,
nstrm=-2,
nss=1,
nsfrpar=0,
nparseg=0,
const=None,
dleak=0.0001,
ipakcb=None,
istcb2=None,
isfropt=0,
nstrail=10,
isuzn=1,
nsfrsets=30,
irtflg=0,
numtim=2,
weight=0.75,
flwtol=0.0001,
reach_data=None,
segment_data=None,
channel_geometry_data=None,
channel_flow_data=None,
dataset_5=None,
irdflag=0,
iptflag=0,
reachinput=False,
transroute=False,
tabfiles=False,
tabfiles_dict=None,
extension="sfr",
unit_number=None,
filenames=None,
options=None,
):
"""
Package constructor
"""
# set default unit number of one is not specified
if unit_number is None:
unit_number = ModflowSfr2.defaultunit()
# set filenames
if filenames is None:
filenames = [None, None, None]
elif isinstance(filenames, str):
filenames = [filenames, None, None]
elif isinstance(filenames, list):
if len(filenames) < 3:
for _ in range(len(filenames), 3):
filenames.append(None)
# update external file information with cbc output, if necessary
if ipakcb is not None:
fname = filenames[1]
model.add_output_file(
ipakcb, fname=fname, package=ModflowSfr2.ftype()
)
else:
ipakcb = 0
# add sfr flow output file
if istcb2 is not None:
if abs(istcb2) > 0:
binflag = False
ext = "out"
if istcb2 < 0:
binflag = True
ext = "bin"
fname = filenames[2]
if fname is None:
fname = model.name + ".sfr.{}".format(ext)
model.add_output_file(
abs(istcb2),
fname=fname,
binflag=binflag,
package=ModflowSfr2.ftype(),
)
else:
istcb2 = 0
# Fill namefile items
name = [ModflowSfr2.ftype()]
units = [unit_number]
extra = [""]
# set package name
fname = [filenames[0]]
# Call ancestor's init to set self.parent, extension, name and unit number
Package.__init__(
self,
model,
extension=extension,
name=name,
unit_number=units,
extra=extra,
filenames=fname,
)
self.url = "sfr2.htm"
self._graph = None # dict of routing connections
# Dataset 0
self.heading = (
"# {} package for ".format(self.name[0])
+ " {}, ".format(model.version_types[model.version])
+ "generated by Flopy."
)
# Dataset 1a and 1b
self.reachinput = reachinput
self.transroute = transroute
self.tabfiles = tabfiles
self.tabfiles_dict = tabfiles_dict
self.numtab = 0 if not tabfiles else len(tabfiles_dict)
self.maxval = (
np.max([tb["numval"] for tb in tabfiles_dict.values()])
if self.numtab > 0
else 0
)
if options is None:
if (reachinput, transroute, tabfiles) != (False, False, False):
options = OptionBlock("", ModflowSfr2, block=False)
self.options = options
# Dataset 1c.
# number of reaches, negative value is flag for unsat.
# flow beneath streams and/or transient routing
self._nstrm = (
np.sign(nstrm) * len(reach_data)
if reach_data is not None
else nstrm
)
if segment_data is not None:
# segment_data is a zero-d array
if not isinstance(segment_data, dict):
if len(segment_data.shape) == 0:
segment_data = np.atleast_1d(segment_data)
nss = len(segment_data)
segment_data = {0: segment_data}
nss = len(set(reach_data["iseg"]))
else:
pass
# use atleast_1d for length since segment_data might be a 0D array
# this seems to be OK, because self.segment_data is produced by the constructor (never 0D)
self.nsfrpar = nsfrpar
self.nparseg = nparseg
# conversion factor used in calculating stream depth for stream reach (icalc = 1 or 2)
self._const = const if const is not None else None
self.dleak = (
dleak # tolerance level of stream depth used in computing leakage
)
self.ipakcb = ipakcb
# flag; unit number for writing table of SFR output to text file
self.istcb2 = istcb2
# if nstrm < 0
# defines the format of the input data and whether or not unsaturated flow is simulated
self.isfropt = isfropt
# if isfropt > 1
# number of trailing wave increments
self.nstrail = nstrail
# max number of vertical cells used to define unsat. zone
self.isuzn = isuzn
# max number trailing waves sets
self.nsfrsets = nsfrsets
# if nstrm < 0 (MF-2005 only)
# switch for transient streamflow routing (> 0 = kinematic wave)
self.irtflg = irtflg
# if irtflg > 0
# number of subtimesteps used for routing
self.numtim = numtim
# time weighting factor used to calculate the change in channel storage
self.weight = weight
# streamflow tolerance for convergence of the kinematic wave equation
self.flwtol = flwtol
# Dataset 2.
self.reach_data = self.get_empty_reach_data(np.abs(self._nstrm))
if reach_data is not None:
for n in reach_data.dtype.names:
self.reach_data[n] = reach_data[n]
# assign node numbers if there are none (structured grid)
if np.diff(
self.reach_data.node
).max() == 0 and self.parent.has_package("DIS"):
# first make kij list
lrc = np.array(self.reach_data)[["k", "i", "j"]].tolist()
self.reach_data["node"] = self.parent.dis.get_node(lrc)
# assign unique ID and outreach columns to each reach
self.reach_data.sort(order=["iseg", "ireach"])
new_cols = {
"reachID": np.arange(1, len(self.reach_data) + 1),
"outreach": np.zeros(len(self.reach_data)),
}
for k, v in new_cols.items():
if k not in self.reach_data.dtype.names:
recfunctions.append_fields(
self.reach_data, names=k, data=v, asrecarray=True
)
# create a stress_period_data attribute to enable parent functions (e.g. plot)
self.stress_period_data = MfList(
self, self.reach_data, dtype=self.reach_data.dtype
)
# Datasets 4 and 6.
# list of values that indicate segments outside of the model
# (depending on how SFR package was constructed)
self.not_a_segment_values = [999999]
self._segments = None
self.segment_data = {0: self.get_empty_segment_data(nss)}
if segment_data is not None:
for i in segment_data.keys():
nseg = len(segment_data[i])
self.segment_data[i] = self.get_empty_segment_data(nseg)
for n in segment_data[i].dtype.names:
# inds = (segment_data[i]['nseg'] -1).astype(int)
self.segment_data[i][n] = segment_data[i][n]
# compute outreaches if nseg and outseg columns have non-default values
if (
np.diff(self.reach_data.iseg).max() != 0
and np.max(list(set(self.graph.keys()))) != 0
and np.max(list(set(self.graph.values()))) != 0
):
if len(self.graph) == 1:
self.segment_data[0]["nseg"] = 1
self.reach_data["iseg"] = 1
consistent_seg_numbers = (
len(
set(self.reach_data.iseg).difference(
set(self.graph.keys())
)
)
== 0
)
if not consistent_seg_numbers:
warnings.warn(
"Inconsistent segment numbers of reach_data and segment_data"
)
# first convert any not_a_segment_values to 0
for v in self.not_a_segment_values:
self.segment_data[0].outseg[
self.segment_data[0].outseg == v
] = 0
self.set_outreaches()
self.channel_geometry_data = channel_geometry_data
self.channel_flow_data = channel_flow_data
# Dataset 5
# set by property from segment_data unless specified manually
self._dataset_5 = dataset_5
self.irdflag = irdflag
self.iptflag = iptflag
# Attributes not included in SFR package input
# dictionary of arrays; see Attributes section of documentation
self.outsegs = {}
# nested dictionary of format {per: {segment: outlet}}
self.outlets = {}
# input format checks:
assert isfropt in [0, 1, 2, 3, 4, 5]
# derived attributes
self._paths = None
self.parent.add_package(self)
def __setattr__(self, key, value):
if key == "nstrm":
super(ModflowSfr2, self).__setattr__("_nstrm", value)
elif key == "dataset_5":
super(ModflowSfr2, self).__setattr__("_dataset_5", value)
elif key == "segment_data":
super(ModflowSfr2, self).__setattr__("segment_data", value)
self._dataset_5 = None
elif key == "const":
super(ModflowSfr2, self).__setattr__("_const", value)
else: # return to default behavior of pakbase
super(ModflowSfr2, self).__setattr__(key, value)
@property
def const(self):
if self._const is None:
const = (
self.len_const[self.parent.dis.lenuni]
* self.time_const[self.parent.dis.itmuni]
)
else:
const = self._const
return const
@property
def nss(self):
# number of stream segments
return len(set(self.reach_data["iseg"]))
@property
def nstrm(self):
return np.sign(self._nstrm) * len(self.reach_data)
@property
def nper(self):
nper = self.parent.nrow_ncol_nlay_nper[-1]
nper = (
1 if nper == 0 else nper
) # otherwise iterations from 0, nper won't run
return nper
@property
def dataset_5(self):
"""
auto-update itmp so it is consistent with segment_data.
"""
ds5 = self._dataset_5
nss = self.nss
if ds5 is None:
irdflag = self._get_flag("irdflag")
iptflag = self._get_flag("iptflag")
ds5 = {0: [nss, irdflag[0], iptflag[0]]}
for per in range(1, self.nper):
sd = self.segment_data.get(per, None)
if sd is None:
ds5[per] = [-nss, irdflag[per], iptflag[per]]
else:
ds5[per] = [len(sd), irdflag[per], iptflag[per]]
return ds5
    @property
    def graph(self):
        """Dictionary of routing connections between segments."""
        # built lazily from segment_data and cached; reset self._graph to
        # None to force a rebuild (e.g. after segment data changes)
        if self._graph is None:
            self._graph = self._make_graph()
        return self._graph
    @property
    def paths(self):
        # Routing path for each segment, computed lazily.
        if self._paths is None:
            self._set_paths()
            return self._paths
        # check to see if routing in segment data was changed
        # (compare the cached nseg->outseg pairs against the current graph;
        # negative keys are excluded from the comparison)
        nseg = np.array(sorted(self._paths.keys()), dtype=int)
        nseg = nseg[nseg > 0].copy()
        outseg = np.array([self._paths[k][1] for k in nseg])
        existing_nseg = sorted(list(self.graph.keys()))
        existing_outseg = [self.graph[k] for k in existing_nseg]
        if not np.array_equal(nseg, existing_nseg) or not np.array_equal(
            outseg, existing_outseg
        ):
            # routing changed; recompute the cached paths before returning
            self._set_paths()
        return self._paths
@property
def df(self):
if pd:
return pd.DataFrame(self.reach_data)
else:
msg = "ModflowSfr2.df: pandas not available"
raise ImportError(msg)
def _make_graph(self):
# get all segments and their outseg
graph = {}
for recarray in self.segment_data.values():
graph.update(dict(zip(recarray["nseg"], recarray["outseg"])))
outlets = set(graph.values()).difference(
set(graph.keys())
) # including lakes
graph.update({o: 0 for o in outlets if o != 0})
return graph
def _set_paths(self):
graph = self.graph
self._paths = {seg: find_path(graph, seg) for seg in graph.keys()}
def _get_flag(self, flagname):
"""
populate values for each stress period
"""
flg = self.__dict__[flagname]
flg = [flg] if np.isscalar(flg) else flg
if len(flg) < self.nper:
return flg + [flg[-1]] * (self.nper - len(flg))
return flg
@staticmethod
def get_empty_reach_data(
nreaches=0, aux_names=None, structured=True, default_value=0.0
):
# get an empty recarray that corresponds to dtype
dtype = ModflowSfr2.get_default_reach_dtype(structured=structured)
if aux_names is not None:
dtype = Package.add_to_dtype(dtype, aux_names, np.float32)
d = create_empty_recarray(nreaches, dtype, default_value=default_value)
d["reachID"] = np.arange(1, nreaches + 1)
return d
@staticmethod
def get_empty_segment_data(nsegments=0, aux_names=None, default_value=0.0):
# get an empty recarray that corresponds to dtype
dtype = ModflowSfr2.get_default_segment_dtype()
if aux_names is not None:
dtype = Package.add_to_dtype(dtype, aux_names, np.float32)
d = create_empty_recarray(
nsegments, dtype, default_value=default_value
)
return d
@staticmethod
def get_default_reach_dtype(structured=True):
if structured:
# include node column for structured grids (useful for indexing)
return np.dtype(
[
("node", np.int),
("k", np.int),
("i", np.int),
("j", np.int),
("iseg", np.int),
("ireach", np.int),
("rchlen", np.float32),
("strtop", np.float32),
("slope", np.float32),
("strthick", np.float32),
("strhc1", np.float32),
("thts", np.float32),
("thti", np.float32),
("eps", np.float32),
("uhc", np.float32),
("reachID", np.int),
("outreach", np.int),
]
)
else:
return np.dtype(
[
("node", np.int),
("iseg", np.int),
("ireach", np.int),
("rchlen", np.float32),
("strtop", np.float32),
("slope", np.float32),
("strthick", np.float32),
("strhc1", np.float32),
("thts", np.float32),
("thti", np.float32),
("eps", np.float32),
("uhc", np.float32),
("reachID", np.int),
("outreach", np.int),
]
)
@staticmethod
def get_default_segment_dtype():
return np.dtype(
[
("nseg", np.int),
("icalc", np.int),
("outseg", np.int),
("iupseg", np.int),
("iprior", np.int),
("nstrpts", np.int),
("flow", np.float32),
("runoff", np.float32),
("etsw", np.float32),
("pptsw", np.float32),
("roughch", np.float32),
("roughbk", np.float32),
("cdpth", np.float32),
("fdpth", np.float32),
("awdth", np.float32),
("bwdth", np.float32),
("hcond1", np.float32),
("thickm1", np.float32),
("elevup", np.float32),
("width1", np.float32),
("depth1", np.float32),
("thts1", np.float32),
("thti1", np.float32),
("eps1", np.float32),
("uhc1", np.float32),
("hcond2", np.float32),
("thickm2", np.float32),
("elevdn", np.float32),
("width2", np.float32),
("depth2", np.float32),
("thts2", np.float32),
("thti2", np.float32),
("eps2", np.float32),
("uhc2", np.float32),
]
)
    @staticmethod
    def load(f, model, nper=None, gwt=False, nsol=1, ext_unit_dict=None):
        """
        Load an existing SFR2 package file and return a ModflowSfr2 instance.

        Parameters
        ----------
        f : str or file handle
            Path to the SFR2 package file, or an open file handle.
        model : model object
            The model to which this package will be attached.
        nper : int, optional
            Number of stress periods to read; defaults to model.nper
            (treated as 1 when the model reports 0 periods).
        gwt : bool
            Present for interface compatibility; not used here.
        nsol : int
            Present for interface compatibility; not used here.
        ext_unit_dict : dict, optional
            Mapping of unit numbers to file metadata, used to recover the
            package unit number and output filenames.

        Returns
        -------
        ModflowSfr2
        """
        if model.verbose:
            sys.stdout.write("loading sfr2 package file...\n")
        tabfiles = False
        tabfiles_dict = {}
        transroute = False
        reachinput = False
        structured = model.structured
        if nper is None:
            nper = model.nper
            nper = (
                1 if nper == 0 else nper
            )  # otherwise iterations from 0, nper won't run
        # accept either a path or an already-open file handle
        openfile = not hasattr(f, "read")
        if openfile:
            filename = f
            f = open(filename, "r")
        # Item 0 -- header (skip leading comment lines)
        while True:
            line = f.readline()
            if line[0] != "#":
                break
        options = None
        if model.version == "mfnwt" and "options" in line.lower():
            # MODFLOW-NWT style OPTIONS block
            options = OptionBlock.load_options(f, ModflowSfr2)
        else:
            # MODFLOW-2005 style: option keywords may appear on item 1a
            query = (
                "reachinput",
                "transroute",
                "tabfiles",
                "lossfactor",
                "strhc1kh",
                "strhc1kv",
            )
            for i in query:
                if i in line.lower():
                    options = OptionBlock(
                        line.lower().strip(), ModflowSfr2, block=False
                    )
                    break
        if options is not None:
            line = f.readline()
            # check for 1b in modflow-2005
            if "tabfile" in line.lower():
                t = line.strip().split()
                options.tabfiles = True
                options.numtab = int(t[1])
                options.maxval = int(t[2])
                line = f.readline()
            # set variables to be passed to class args
            transroute = options.transroute
            reachinput = options.reachinput
            tabfiles = isinstance(options.tabfiles, np.ndarray)
            numtab = options.numtab if tabfiles else 0
        # item 1c
        (
            nstrm,
            nss,
            nsfrpar,
            nparseg,
            const,
            dleak,
            ipakcb,
            istcb2,
            isfropt,
            nstrail,
            isuzn,
            nsfrsets,
            irtflg,
            numtim,
            weight,
            flwtol,
            option,
        ) = _parse_1c(line, reachinput=reachinput, transroute=transroute)
        # item 2
        # set column names, dtypes
        names = _get_item2_names(nstrm, reachinput, isfropt, structured)
        dtypes = [
            d
            for d in ModflowSfr2.get_default_reach_dtype().descr
            if d[0] in names
        ]
        lines = []
        # one line per reach; only the columns present in this file format
        for i in range(abs(nstrm)):
            line = f.readline()
            line = line_parse(line)
            ireach = tuple(map(float, line[: len(dtypes)]))
            lines.append(ireach)
        tmp = np.array(lines, dtype=dtypes)
        # initialize full reach_data array with all possible columns
        reach_data = ModflowSfr2.get_empty_reach_data(len(lines))
        for n in names:
            reach_data[n] = tmp[
                n
            ]  # not sure if there's a way to assign multiple columns
        # zero-based convention
        inds = ["k", "i", "j"] if structured else ["node"]
        _markitzero(reach_data, inds)
        # items 3 and 4 are skipped (parameters not supported)
        # item 5
        segment_data = {}
        channel_geometry_data = {}
        channel_flow_data = {}
        dataset_5 = {}
        aux_variables = (
            {}
        )  # not sure where the auxiliary variables are supposed to go
        for i in range(0, nper):
            # Dataset 5
            dataset_5[i] = _get_dataset(f.readline(), [-1, 0, 0, 0])
            itmp = dataset_5[i][0]
            if itmp > 0:
                # Item 6
                current = ModflowSfr2.get_empty_segment_data(
                    nsegments=itmp, aux_names=option
                )
                # container to hold any auxiliary variables
                current_aux = {}
                # these could also be implemented as structured arrays with a column for segment number
                current_6d = {}
                current_6e = {}
                # print(i,icalc,nstrm,isfropt,reachinput)
                for j in range(itmp):
                    dataset_6a = _parse_6a(f.readline(), option)
                    current_aux[j] = dataset_6a[-1]
                    dataset_6a = dataset_6a[:-1]  # drop xyz
                    icalc = dataset_6a[1]
                    # link dataset 6d, 6e by nseg of dataset_6a
                    temp_nseg = dataset_6a[0]
                    # datasets 6b and 6c aren't read under the conditions below
                    # see table under description of dataset 6c,
                    # in the MODFLOW Online Guide for a description
                    # of this logic
                    # https://water.usgs.gov/ogw/modflow-nwt/MODFLOW-NWT-Guide/sfr.htm
                    dataset_6b, dataset_6c = (0,) * 9, (0,) * 9
                    if not (
                        isfropt in [2, 3] and icalc == 1 and i > 1
                    ) and not (isfropt in [1, 2, 3] and icalc >= 2):
                        dataset_6b = _parse_6bc(
                            f.readline(),
                            icalc,
                            nstrm,
                            isfropt,
                            reachinput,
                            per=i,
                        )
                        dataset_6c = _parse_6bc(
                            f.readline(),
                            icalc,
                            nstrm,
                            isfropt,
                            reachinput,
                            per=i,
                        )
                    current[j] = dataset_6a + dataset_6b + dataset_6c
                    if icalc == 2:
                        # ATL: not sure exactly how isfropt logic functions for this
                        # dataset 6d description suggests that this line isn't read for isfropt > 1
                        # but description of icalc suggest that icalc=2 (8-point channel) can be used with any isfropt
                        if (
                            i == 0
                            or nstrm > 0
                            and not reachinput
                            or isfropt <= 1
                        ):
                            # two lines of 8 values: cross-section x, then z
                            dataset_6d = []
                            for _ in range(2):
                                dataset_6d.append(
                                    _get_dataset(f.readline(), [0.0] * 8)
                                )
                                # dataset_6d.append(list(map(float, f.readline().strip().split())))
                            current_6d[temp_nseg] = dataset_6d
                    if icalc == 4:
                        # three lines of nstrpts values (flow, depth, width table)
                        nstrpts = dataset_6a[5]
                        dataset_6e = []
                        for _ in range(3):
                            dataset_6e.append(
                                _get_dataset(f.readline(), [0.0] * nstrpts)
                            )
                        current_6e[temp_nseg] = dataset_6e
                segment_data[i] = current
                # NOTE(review): keyed by j + 1 (last segment index), not by
                # stress period i -- looks suspicious; confirm intended use
                aux_variables[j + 1] = current_aux
                if len(current_6d) > 0:
                    channel_geometry_data[i] = current_6d
                if len(current_6e) > 0:
                    channel_flow_data[i] = current_6e
                if tabfiles and i == 0:
                    for j in range(numtab):
                        segnum, numval, iunit = map(
                            int, f.readline().strip().split()
                        )
                        # NOTE(review): key "inuit" looks like a typo for
                        # "iunit"; kept as-is because consumers may rely on it
                        tabfiles_dict[segnum] = {"numval": numval, "inuit": iunit}
            else:
                # itmp <= 0: segment data reused from a previous period
                continue
        if openfile:
            f.close()
        # determine specified unit number
        unitnumber = None
        filenames = [None, None, None]
        if ext_unit_dict is not None:
            for key, value in ext_unit_dict.items():
                if value.filetype == ModflowSfr2.ftype():
                    unitnumber = key
                    filenames[0] = os.path.basename(value.filename)
                if ipakcb > 0:
                    if key == ipakcb:
                        filenames[1] = os.path.basename(value.filename)
                        model.add_pop_key_list(key)
                if abs(istcb2) > 0:
                    if key == abs(istcb2):
                        filenames[2] = os.path.basename(value.filename)
                        model.add_pop_key_list(key)
        return ModflowSfr2(
            model,
            nstrm=nstrm,
            nss=nss,
            nsfrpar=nsfrpar,
            nparseg=nparseg,
            const=const,
            dleak=dleak,
            ipakcb=ipakcb,
            istcb2=istcb2,
            isfropt=isfropt,
            nstrail=nstrail,
            isuzn=isuzn,
            nsfrsets=nsfrsets,
            irtflg=irtflg,
            numtim=numtim,
            weight=weight,
            flwtol=flwtol,
            reach_data=reach_data,
            segment_data=segment_data,
            dataset_5=dataset_5,
            channel_geometry_data=channel_geometry_data,
            channel_flow_data=channel_flow_data,
            reachinput=reachinput,
            transroute=transroute,
            tabfiles=tabfiles,
            tabfiles_dict=tabfiles_dict,
            unit_number=unitnumber,
            filenames=filenames,
            options=options,
        )
def check(self, f=None, verbose=True, level=1, checktype=None):
"""
Check sfr2 package data for common errors.
Parameters
----------
f : str or file handle
String defining file name or file handle for summary file
of check method output. If a string is passed a file handle
is created. If f is None, check method does not write
results to a summary file. (default is None)
verbose : bool
Boolean flag used to determine if check method results are
written to the screen
level : int
Check method analysis level. If level=0, summary checks are
performed. If level=1, full checks are performed.
Returns
-------
None
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow.load('model.nam')
>>> m.sfr2.check()
"""
self._graph = None # remake routing graph from segment data
chk = check(self, verbose=verbose, level=level)
chk.for_nans()
chk.numbering()
chk.routing()
chk.overlapping_conductance()
chk.elevations()
chk.slope()
if f is not None:
if isinstance(f, str):
pth = os.path.join(self.parent.model_ws, f)
f = open(pth, "w")
f.write("{}\n".format(chk.txt))
# f.close()
return chk
    def assign_layers(self, adjust_botms=False, pad=1.0):
        """
        Assigns the appropriate layer for each SFR reach,
        based on cell bottoms at location of reach.

        Parameters
        ----------
        adjust_botms : bool
            Streambed bottom elevations below the model bottom
            will cause an error in MODFLOW. If True, adjust
            bottom elevations in lowest layer of the model
            so they are at least pad distance below any co-located
            streambed elevations.
        pad : scalar
            Minimum distance below streambed bottom to set
            any conflicting model bottom elevations.

        Notes
        -----
        Streambed bottom = strtop - strthick
        This routine updates the elevations in the botm array
        of the flopy.model.ModflowDis instance. To produce a
        new DIS package file, model.write() or flopy.model.ModflowDis.write()
        must be run. Conflicts are logged to sfr_botm_conflicts.chk in the
        current working directory.
        """
        # streambed bottom elevation at each reach
        streambotms = self.reach_data.strtop - self.reach_data.strthick
        i, j = self.reach_data.i, self.reach_data.j
        layers = self.parent.dis.get_layer(i, j, streambotms)
        # check against model bottom
        logfile = "sfr_botm_conflicts.chk"
        mbotms = self.parent.dis.botm.array[-1, i, j]
        below = streambotms <= mbotms
        below_i = self.reach_data.i[below]
        below_j = self.reach_data.j[below]
        # l collects the columns written to the conflict log
        l = []
        header = ""
        if np.any(below):
            print(
                "Warning: SFR streambed elevations below model bottom. "
                "See sfr_botm_conflicts.chk"
            )
            if not adjust_botms:
                # report-only mode: log the conflicting cells and elevations
                l += [below_i, below_j, mbotms[below], streambotms[below]]
                header += "i,j,model_botm,streambed_botm"
            else:
                print("Fixing elevation conflicts...")
                botm = self.parent.dis.botm.array.copy()
                for ib, jb in zip(below_i, below_j):
                    # lower the bottom of the deepest layer to pad below the
                    # lowest streambed bottom among reaches in this cell
                    inds = (self.reach_data.i == ib) & (
                        self.reach_data.j == jb
                    )
                    botm[-1, ib, jb] = streambotms[inds].min() - pad
                    # l.append(botm[-1, ib, jb])
                # botm[-1, below_i, below_j] = streambotms[below] - pad
                l.append(botm[-1, below_i, below_j])
                header += ",new_model_botm"
                self.parent.dis.botm = botm
                # re-read and verify that no conflicts remain
                mbotms = self.parent.dis.botm.array[-1, i, j]
                assert not np.any(streambotms <= mbotms)
                print(
                    "New bottom array assigned to Flopy DIS package "
                    "instance.\nRun flopy.model.write() or "
                    "flopy.model.ModflowDis.write() to write new DIS file."
                )
            header += "\n"
            with open(logfile, "w") as log:
                log.write(header)
                a = np.array(l).transpose()
                for line in a:
                    log.write(",".join(map(str, line)) + "\n")
        # assign the computed layer for each reach
        self.reach_data["k"] = layers
def deactivate_ibound_above(self):
    """
    Set ibound to 0 for all cells overlying active SFR reaches.

    Notes
    -----
    Updates the ibound array of the flopy.modflow.ModflowBas6 instance
    in place. To produce a new BAS6 package file, model.write() or
    flopy.model.ModflowBas6.write() must be run.
    """
    ibound = self.parent.bas6.ibound.array
    # for each reach, zero out every layer above the layer containing it
    for layer, row, col in zip(
        self.reach_data.k, self.reach_data.i, self.reach_data.j
    ):
        ibound[:layer, row, col] = 0
    self.parent.bas6.ibound = ibound
def get_outlets(self, level=0, verbose=True):
    """
    Traces all routing connections from each headwater to the outlet.

    Populates self.outsegs (per stress period, an array of segment
    routing sequences) and self.outlets (per stress period, a dict
    mapping each segment to its outlet segment) from the routing
    graph in self.paths. Returns a (currently empty) message string
    for the check framework.
    """
    txt = ""
    for per in range(self.nper):
        # skip stress periods where segment data are not defined
        if per > 0 > self.dataset_5[per][0]:
            continue
        # assemble the routing sequences from the paths graph
        n_segments = len(self.segment_data[per].nseg)
        max_path_len = np.max(
            [len(p) if p is not None else 0 for p in self.paths.values()]
        )
        sequences = np.zeros((n_segments, max_path_len), dtype=int)
        for row, (seg, path) in enumerate(self.paths.items()):
            if seg > 0:
                sequences[row, : len(path)] = path
        sequences.sort(axis=0)
        self.outsegs[per] = sequences
        # outlet of each segment is the last entry in its routing path
        self.outlets[per] = {
            seg: self.paths[seg][-1] if seg in self.paths else seg
            for seg in self.segment_data[per].nseg
        }
    return txt
def reset_reaches(self):
    """Renumber reaches (ireach) consecutively from 1 within each segment."""
    self.reach_data.sort(order=["iseg", "ireach"])
    rd = self.reach_data
    segments = list(set(rd.iseg))
    # number of reaches in each segment, keyed by segment number
    counts = np.bincount(rd.iseg)[1:]
    counts = {seg: n for seg, n in zip(range(1, len(counts) + 1), counts)}
    new_ireach = np.concatenate(
        [np.arange(1, counts[seg] + 1) for seg in segments]
    )
    self.reach_data["ireach"] = new_ireach
def set_outreaches(self):
    """
    Determine the outreach for each SFR reach (requires a reachID
    column in reach_data). Uses the segment routing specified for the
    first stress period to route reaches between segments.
    """
    self.reach_data.sort(order=["iseg", "ireach"])
    # ensure that each segment starts with reach 1
    self.reset_reaches()
    # ensure that all outsegs are segments, outlets, or negative (lakes)
    self.repair_outsegs()
    rd = self.reach_data
    outseg = self.graph
    # reachID of the first reach of each segment
    first_reach_ids = dict(
        zip(rd[rd.ireach == 1].iseg, rd[rd.ireach == 1].reachID)
    )
    outreach = []
    n = len(rd)
    for idx in range(n):
        last_in_segment = idx + 1 == n or rd.ireach[idx + 1] == 1
        if not last_in_segment:
            # within a segment: route to the next reach downstream
            next_id = rd.reachID[idx + 1]
        else:
            downstream_seg = outseg[rd.iseg[idx]]
            if downstream_seg > 0:
                # route to reach 1 of the downstream segment
                next_id = first_reach_ids[downstream_seg]
            else:
                # outlet (or lake): no outreach
                next_id = 0
        outreach.append(next_id)
    self.reach_data["outreach"] = outreach
def get_slopes(
    self, default_slope=0.001, minimum_slope=0.0001, maximum_slope=1.0
):
    """
    Compute slopes by reach using values in strtop (streambed top)
    and rchlen (reach length) columns of reach_data. The slope for a
    reach n is computed as (strtop(n) - strtop(n+1)) / rchlen(n),
    i.e. the elevation drop to the downstream reach over the reach
    length, so downhill slopes are positive.
    Slopes for outlet reaches are set equal to a default value
    (default_slope). Populates the slope column in reach_data.

    Parameters
    ----------
    default_slope : float
        Slope value applied to outlet reaches
        (where water leaves the model). Default value is 0.001
    minimum_slope : float
        Assigned to reaches with computed slopes less than this value.
        This ensures that the Manning's equation won't produce unreasonable
        values of stage (in other words, that stage is consistent with
        assumption that streamflow is primarily drive by the streambed
        gradient). Default value is 0.0001.
    maximum_slope : float
        Assigned to reaches with computed slopes more than this value.
        Default value is 1.
    """
    # compute outreaches if they aren't there already
    # (all-equal outreach values mean they were never set)
    if np.diff(self.reach_data.outreach).max() == 0:
        self.set_outreaches()
    rd = self.reach_data
    # streambed-top elevation and reach length keyed by reachID
    elev = dict(zip(rd.reachID, rd.strtop))
    dist = dict(zip(rd.reachID, rd.rchlen))
    # downstream elevation per reach; -9999 flags outlets (outreach == 0)
    dnelev = {
        rid: elev[rd.outreach[i]] if rd.outreach[i] != 0 else -9999
        for i, rid in enumerate(rd.reachID)
    }
    slopes = np.array(
        [
            (elev[i] - dnelev[i]) / dist[i]
            if dnelev[i] != -9999
            else default_slope
            for i in rd.reachID
        ]
    )
    # clamp computed slopes to the allowed range
    slopes[slopes < minimum_slope] = minimum_slope
    slopes[slopes > maximum_slope] = maximum_slope
    self.reach_data["slope"] = slopes
def get_upsegs(self):
    """
    From segment_data, returns nested dict of all upstream segments by
    segment, by stress period.

    Returns
    -------
    all_upsegs : dict
        Nested dictionary of form
        {stress period: {segment: [list of upsegs]}}

    Notes
    -----
    This method will not work if there are instances of circular routing.
    """
    all_upsegs = {}
    for per in range(self.nper):
        # skip stress periods where segment data are not defined
        if per > 0 > self.dataset_5[per][0]:
            continue
        sd = self.segment_data[per]
        # adjacent (one-level) upstream segments, keyed by outseg
        upsegs = {
            out: sd.nseg[sd.outseg == out].tolist()
            for out in np.unique(sd.outseg)
        }
        # exclude 0, which is the outlet designator
        outsegs = [seg for seg in upsegs if seg > 0]
        # walk upstream from each segment, accumulating upsegs
        # until the headwaters are reached
        for outseg in outsegs:
            frontier = upsegs[outseg]
            while True:
                next_frontier = []
                for seg in frontier:
                    if seg in outsegs:
                        next_frontier += upsegs[seg]
                if not next_frontier:
                    break
                frontier = next_frontier
                upsegs[outseg] += next_frontier
        # the walk revisits lower-order streams many times;
        # use a set to keep unique upsegs only
        all_upsegs[per] = {seg: list(set(upsegs[seg])) for seg in outsegs}
    return all_upsegs
def get_variable_by_stress_period(self, varname):
    """
    Build a recarray of ``varname`` (dataset 6a) values by stress
    period, located at the k,i,j cell of the first reach of each
    segment where the variable is non-zero in any period. Value
    columns are named ``<varname><per>`` (e.g. flow0, flow1, ...).
    """
    dtype = []
    all_data = np.zeros((self.nss, self.nper), dtype=float)
    # collect the variable for every segment, one column per period
    for per in range(self.nper):
        inds = self.segment_data[per].nseg - 1
        all_data[inds, per] = self.segment_data[per][varname]
        dtype.append(("{}{}".format(varname, per), float))
    # keep only segments where the variable is non-zero somewhere
    isvar = all_data.sum(axis=1) != 0
    ra = np.rec.fromarrays(all_data[isvar].transpose().copy(), dtype=dtype)
    segs = self.segment_data[0].nseg[isvar]
    isseg = np.array([s in segs for s in self.reach_data.iseg])
    # first reach of each flagged segment
    isinlet = isseg & (self.reach_data.ireach == 1)
    rd = np.array(self.reach_data[isinlet])[
        ["k", "i", "j", "iseg", "ireach"]
    ]
    ra = recfunctions.merge_arrays([rd, ra], flatten=True, usemask=False)
    return ra.view(np.recarray)
def repair_outsegs(self):
    """
    Zero out any outseg values (stress period 0) that are neither
    valid segment numbers nor negative (lake) numbers, then
    invalidate the cached routing graph.
    """
    sd = self.segment_data[0]
    valid = np.in1d(sd.outseg, sd.nseg) | (sd.outseg < 0)
    sd["outseg"][~valid] = 0.0
    self._graph = None
def renumber_segments(self):
    """
    Renumber segments so that segment numbering is continuous and always
    increases in the downstream direction. This may speed convergence of
    the NWT solver in some situations.

    Returns
    -------
    r : dictionary mapping old segment numbers to new
    """
    nseg = sorted(list(self.graph.keys()))
    outseg = [self.graph[k] for k in nseg]
    # explicitly fix any gaps in the numbering
    # (i.e. from removing segments)
    nseg2 = np.arange(1, len(nseg) + 1)
    # intermediate mapping that closes the gaps (old -> consecutive)
    r1 = dict(zip(nseg, nseg2))
    r1[0] = 0
    outseg2 = np.array([r1[s] for s in outseg])

    # function re-assigning upseg numbers consecutively at one level
    # relative to outlet(s). Counts down from the number of segments
    def reassign_upsegs(r, nexts, upsegs):
        nextupsegs = []
        for u in upsegs:
            r[u] = nexts if u > 0 else u  # handle lakes
            nexts -= 1
            nextupsegs += list(nseg2[outseg2 == u])
        return r, nexts, nextupsegs

    ns = len(nseg)
    # start at outlets with nss;
    # renumber upsegs consecutively at each level
    # until all headwaters have been reached
    nexts = ns
    r2 = {0: 0}
    nextupsegs = nseg2[outseg2 == 0]
    for _ in range(ns):
        r2, nexts, nextupsegs = reassign_upsegs(r2, nexts, nextupsegs)
        if len(nextupsegs) == 0:
            break
    # map original segment numbers to new numbers
    r = {k: r2.get(v, v) for k, v in r1.items()}
    # renumber segments in all stress period data
    for per in self.segment_data.keys():
        self.segment_data[per]["nseg"] = [
            r.get(s, s) for s in self.segment_data[per].nseg
        ]
        self.segment_data[per]["outseg"] = [
            r.get(s, s) for s in self.segment_data[per].outseg
        ]
        self.segment_data[per].sort(order="nseg")
        nseg = self.segment_data[per].nseg
        outseg = self.segment_data[per].outseg
        # verify no segment routes to a lower-numbered segment
        inds = (outseg > 0) & (nseg > outseg)
        assert not np.any(inds)
        # verify numbering is now continuous 1..nss
        assert (
            len(self.segment_data[per]["nseg"])
            == self.segment_data[per]["nseg"].max()
        )
    self._graph = None  # reset routing dict
    # renumber segments in reach_data
    self.reach_data["iseg"] = [r.get(s, s) for s in self.reach_data.iseg]
    self.reach_data.sort(order=["iseg", "ireach"])
    self.reach_data["reachID"] = np.arange(1, len(self.reach_data) + 1)
    self.set_outreaches()  # reset the outreaches to ensure continuity

    # renumber segments in other datasets
    def renumber_channel_data(d):
        # remap the segment-number keys of nested {per: {seg: data}} dicts
        if d is not None:
            d2 = {}
            for k, v in d.items():
                d2[k] = {}
                for s, vv in v.items():
                    d2[k][r[s]] = vv
        else:
            d2 = None
        return d2

    self.channel_geometry_data = renumber_channel_data(
        self.channel_geometry_data
    )
    self.channel_flow_data = renumber_channel_data(self.channel_flow_data)
    return r
def plot_path(self, start_seg=None, end_seg=0, plot_segment_lines=True):
    """
    Plot a profile of streambed elevation and model top
    along a path of segments.

    Parameters
    ----------
    start_seg : int
        Number of first segment in path.
    end_seg : int
        Number of last segment in path (defaults to 0/outlet).
    plot_segment_lines : bool
        Controls plotting of segment end locations along profile.
        (default True)

    Returns
    -------
    ax : matplotlib.axes._subplots.AxesSubplot object

    Raises
    ------
    ImportError
        If matplotlib or pandas is not available.
    """
    try:
        import matplotlib.pyplot as plt
    # only an import failure should be translated into the
    # "matplotlib must be installed" message (was a bare except,
    # which also hid unrelated errors raised during import)
    except ImportError:
        err_msg = (
            "matplotlib must be installed to use "
            + "ModflowSfr2.plot_path()"
        )
        raise ImportError(err_msg)
    if not pd:
        err_msg = "ModflowSfr2.plot_path: pandas not available"
        raise ImportError(err_msg)
    df = self.df
    m = self.parent
    mfunits = m.sr.model_length_units
    # conversion factors to miles for the x axis (1.0 if units unknown)
    to_miles = {"feet": 1 / 5280.0, "meters": 1 / (0.3048 * 5280.0)}
    # slice the routing path from start_seg down to end_seg
    path = np.array(self.paths[start_seg])
    endidx = np.where(path == end_seg)[0]
    endidx = endidx if len(endidx) > 0 else None
    path = path[: np.squeeze(endidx)]
    path = [s for s in path if s > 0]  # skip lakes for now
    # gather reach data along the path, in routing order
    groups = df.groupby("iseg")
    tmp = pd.concat([groups.get_group(s) for s in path])
    tops = m.dis.top.array[tmp.i, tmp.j]
    dist = np.cumsum(tmp.rchlen.values) * to_miles.get(mfunits, 1.0)
    # distances at which each segment starts (ireach == 1)
    starts = dist[np.where(tmp.ireach.values == 1)[0]]
    ax = plt.subplots(figsize=(11, 8.5))[-1]
    ax.plot(dist, tops, label="Model top")
    ax.plot(dist, tmp.strtop, label="Streambed top")
    ax.set_xlabel("Distance along path, in miles")
    ax.set_ylabel("Elevation, in {}".format(mfunits))
    ymin, ymax = ax.get_ylim()
    plt.autoscale(False)
    if plot_segment_lines:  # plot segment ends as vertical lines
        ax.vlines(
            x=starts,
            ymin=ymin,
            ymax=ymax,
            lw=0.1,
            alpha=0.1,
            label="Gray lines indicate\nsegment ends.",
        )
    ax.legend()
    # plot about 10 segment numbers along the path
    stride = np.floor(len(dist) / 10)
    stride = 1 if stride < 1 else stride
    inds = np.arange(0, len(dist), stride, dtype=int)
    plot_segnumbers = tmp.iseg.values[inds]
    xlocs = dist[inds]
    pad = 0.04 * (ymax - ymin)
    for x, sn in zip(xlocs, plot_segnumbers):
        ax.text(x, ymin + pad, "{}".format(sn), va="top")
    ax.text(
        xlocs[0],
        ymin + pad * 1.2,
        "Segment numbers:",
        va="bottom",
        fontweight="bold",
    )
    ax.text(
        dist[-1], ymin + pad, "{}".format(end_seg), ha="center", va="top"
    )
    return ax
def _get_headwaters(self, per=0):
    """
    List all segments that are not outsegs (that do not have any
    segments upstream).

    Parameters
    ----------
    per : int
        Stress period for which to list headwater segments (default 0)

    Returns
    -------
    headwaters : np.ndarray (1-D)
        One dimensional array listing all headwater segments.
    """
    # for each segment, the segments that route to it
    # NOTE(review): the comprehension iterates segment_data[0].nseg while
    # filtering segment_data[per] -- this assumes segment numbering is
    # identical across stress periods; confirm before using per != 0
    upsegs = [
        self.segment_data[per]
        .nseg[self.segment_data[per].outseg == s]
        .tolist()
        for s in self.segment_data[0].nseg
    ]
    # headwaters are the segments with no upstream segments
    return self.segment_data[per].nseg[
        np.array([i for i, u in enumerate(upsegs) if len(u) == 0])
    ]
def _interpolate_to_reaches(self, segvar1, segvar2, per=0):
    """
    Interpolate values in datasets 6b and 6c to each reach in
    stream segment

    Parameters
    ----------
    segvar1 : str
        Column/variable name in segment_data array for representing start
        of segment (e.g. hcond1 for hydraulic conductivity)
        For segments with icalc=2 (specified channel geometry); if width1
        is given, the eighth distance point (XCPT8) from dataset 6d will
        be used as the stream width.
        For icalc=3, an arbitrary width of 5 is assigned.
        For icalc=4, the mean value for width given in item 6e is used.
    segvar2 : str
        Column/variable name in segment_data array for representing start
        of segment (e.g. hcond2 for hydraulic conductivity)
    per : int
        Stress period with segment data to interpolate

    Returns
    -------
    reach_values : 1D array
        One dimensional array of interpolated values of same length as
        reach_data array. For example, hcond1 and hcond2 could be entered
        as inputs to get values for the strhc1 (hydraulic conductivity)
        column in reach_data.
    """
    reach_data = self.reach_data
    segment_data = self.segment_data[per]
    # both arrays must be in routing order for the interpolation
    segment_data.sort(order="nseg")
    reach_data.sort(order=["iseg", "ireach"])
    reach_values = []
    for seg in segment_data.nseg:
        reaches = reach_data[reach_data.iseg == seg]
        # distance to the midpoint of each reach along the segment
        dist = np.cumsum(reaches.rchlen) - 0.5 * reaches.rchlen
        icalc = segment_data.icalc[segment_data.nseg == seg]
        # get width from channel cross section length
        if "width" in segvar1 and icalc == 2:
            channel_geometry_data = self.channel_geometry_data[per]
            # XCPT8: last distance point of the first cross-section row
            reach_values += list(
                np.ones(len(reaches)) * channel_geometry_data[seg][0][-1]
            )
        # assign arbitrary width since width is based on flow
        elif "width" in segvar1 and icalc == 3:
            reach_values += list(np.ones(len(reaches)) * 5)
        # assume width to be mean from streamflow width/flow table
        elif "width" in segvar1 and icalc == 4:
            channel_flow_data = self.channel_flow_data[per]
            reach_values += list(
                np.ones(len(reaches)) * np.mean(channel_flow_data[seg][2])
            )
        else:
            # linear interpolation between the segment-start (segvar1)
            # and segment-end (segvar2) values at reach midpoints
            fp = [
                segment_data[segment_data["nseg"] == seg][segvar1][0],
                segment_data[segment_data["nseg"] == seg][segvar2][0],
            ]
            xp = [dist[0], dist[-1]]
            reach_values += np.interp(dist, xp, fp).tolist()
    return np.array(reach_values)
def _write_1c(self, f_sfr):
    """
    Write item 1c of the SFR package file: global dimensioning and
    solution options.

    Parameters
    ----------
    f_sfr : file object
        Open, writable handle for the SFR package file.
    """
    # NSTRM NSS NSFRPAR NPARSEG CONST DLEAK ipakcb ISTCB2
    # [ISFROPT] [NSTRAIL] [ISUZN] [NSFRSETS] [IRTFLG] [NUMTIM] [WEIGHT] [FLWTOL]
    f_sfr.write(
        "{:.0f} {:.0f} {:.0f} {:.0f} {:.8f} {:.8f} {:.0f} {:.0f} ".format(
            self.nstrm,
            self.nss,
            self.nsfrpar,
            self.nparseg,
            self.const,
            self.dleak,
            self.ipakcb,
            self.istcb2,
        )
    )
    if self.reachinput:
        # with REACHINPUT active, NSTRM is written as a positive number
        self.nstrm = abs(
            self.nstrm
        )  # see explanation for dataset 1c in online guide
        f_sfr.write("{:.0f} ".format(self.isfropt))
        if self.isfropt > 1:
            # unsaturated-zone flow dimensioning
            f_sfr.write(
                "{:.0f} {:.0f} {:.0f} ".format(
                    self.nstrail, self.isuzn, self.nsfrsets
                )
            )
    if self.nstrm < 0:
        # negative NSTRM (older convention) also triggers ISFROPT output
        f_sfr.write("{:.0f} ".format(self.isfropt))
        if self.isfropt > 1:
            f_sfr.write(
                "{:.0f} {:.0f} {:.0f} ".format(
                    self.nstrail, self.isuzn, self.nsfrsets
                )
            )
    if self.nstrm < 0 or self.transroute:
        # transient streamflow-routing options
        f_sfr.write("{:.0f} ".format(self.irtflg))
        if self.irtflg > 0:
            f_sfr.write(
                "{:.0f} {:.8f} {:.8f} ".format(
                    self.numtim, self.weight, self.flwtol
                )
            )
    f_sfr.write("\n")
def _write_reach_data(self, f_sfr):
    """
    Write item 2 (reach data) of the SFR package file, one formatted
    line per reach, with k/i/j/node indices converted to 1-based.

    Parameters
    ----------
    f_sfr : file object
        Open, writable handle for the SFR package file.
    """
    # Write the recarray (data) to the file (or file handle) f
    assert isinstance(self.reach_data, np.recarray), (
        "MfList.__tofile() data arg " + "not a recarray"
    )
    # decide which columns to write
    # columns = self._get_item2_names()
    columns = _get_item2_names(
        self.nstrm,
        self.reachinput,
        self.isfropt,
        structured=self.parent.structured,
    )
    # Add one to the kij indices (MODFLOW input is 1-based)
    # names = self.reach_data.dtype.names
    # lnames = []
    # [lnames.append(name.lower()) for name in names]
    # --make copy of data for multiple calls
    d = np.array(self.reach_data)
    for idx in ["k", "i", "j", "node"]:
        if idx in columns:
            d[idx] += 1
    d = d[columns]  # data columns sorted
    formats = _fmt_string(d) + "\n"
    for rec in d:
        f_sfr.write(formats.format(*rec))
def _write_segment_data(self, i, j, f_sfr):
    """
    Write dataset 6a (segment data) for one segment, then datasets
    6b and 6c via _write_6bc.

    Parameters
    ----------
    i : int
        Stress period index.
    j : int
        Row index of the segment within segment_data[i].
    f_sfr : file object
        Open, writable handle for the SFR package file.
    """
    cols = [
        "nseg",
        "icalc",
        "outseg",
        "iupseg",
        "iprior",
        "nstrpts",
        "flow",
        "runoff",
        "etsw",
        "pptsw",
        "roughch",
        "roughbk",
        "cdpth",
        "fdpth",
        "awdth",
        "bwdth",
    ]
    seg_dat = np.array(self.segment_data[i])[cols][j]
    fmts = _fmt_string_list(seg_dat)
    # unset (default) values are written as zeros
    (
        nseg,
        icalc,
        outseg,
        iupseg,
        iprior,
        nstrpts,
        flow,
        runoff,
        etsw,
        pptsw,
        roughch,
        roughbk,
        cdpth,
        fdpth,
        awdth,
        bwdth,
    ) = [0 if v == self.default_value else v for v in seg_dat]
    f_sfr.write(
        " ".join(fmts[0:4]).format(nseg, icalc, outseg, iupseg) + " "
    )
    # IPRIOR is only written for diversion segments (IUPSEG > 0)
    if iupseg > 0:
        f_sfr.write(fmts[4].format(iprior) + " ")
    # NSTRPTS is only written for tabular flow segments (ICALC = 4)
    if icalc == 4:
        f_sfr.write(fmts[5].format(nstrpts) + " ")
    f_sfr.write(
        " ".join(fmts[6:10]).format(flow, runoff, etsw, pptsw) + " "
    )
    # Manning's roughness terms depend on ICALC
    if icalc in [1, 2]:
        f_sfr.write(fmts[10].format(roughch) + " ")
    if icalc == 2:
        f_sfr.write(fmts[11].format(roughbk) + " ")
    # power-function depth/width coefficients for ICALC = 3
    if icalc == 3:
        f_sfr.write(
            " ".join(fmts[12:16]).format(cdpth, fdpth, awdth, bwdth) + " "
        )
    f_sfr.write("\n")
    # dataset 6b: upstream end of the segment
    self._write_6bc(
        i,
        j,
        f_sfr,
        cols=[
            "hcond1",
            "thickm1",
            "elevup",
            "width1",
            "depth1",
            "thts1",
            "thti1",
            "eps1",
            "uhc1",
        ],
    )
    # dataset 6c: downstream end of the segment
    self._write_6bc(
        i,
        j,
        f_sfr,
        cols=[
            "hcond2",
            "thickm2",
            "elevdn",
            "width2",
            "depth2",
            "thts2",
            "thti2",
            "eps2",
            "uhc2",
        ],
    )
def _write_6bc(self, i, j, f_sfr, cols=()):
    """
    Write dataset 6b (upstream end) or 6c (downstream end) for one
    segment. Which variables are written depends on isfropt, the
    segment's icalc value, and whether this is the first stress period.

    Parameters
    ----------
    i : int
        Stress period index.
    j : int
        Row index of the segment within segment_data[i].
    f_sfr : file object
        Open, writable handle for the SFR package file.
    cols : sequence of str
        The nine column names to pull from segment_data, in the order
        hcond, thickm, elev, width, depth, thts, thti, eps, uhc.
    """
    cols = list(cols)
    # icalc is column 1 of dataset 6a
    icalc = self.segment_data[i][j][1]
    seg_dat = np.array(self.segment_data[i])[cols][j]
    fmts = _fmt_string_list(seg_dat)
    # unset (default) values are written as zeros
    hcond, thickm, elevupdn, width, depth, thts, thti, eps, uhc = [
        0 if v == self.default_value else v for v in seg_dat
    ]
    if self.isfropt in [0, 4, 5] and icalc <= 0:
        # specified stage: hcond, thickm, elev, width, depth
        f_sfr.write(
            " ".join(fmts[0:5]).format(
                hcond, thickm, elevupdn, width, depth
            )
            + " "
        )
    elif self.isfropt in [0, 4, 5] and icalc == 1:
        # rectangular channel: depth computed by Manning's equation
        f_sfr.write(fmts[0].format(hcond) + " ")
        if i == 0:
            f_sfr.write(
                " ".join(fmts[1:4]).format(thickm, elevupdn, width) + " "
            )
            if self.isfropt in [4, 5]:
                # unsaturated-zone properties, first stress period only
                f_sfr.write(
                    " ".join(fmts[5:8]).format(thts, thti, eps) + " "
                )
                if self.isfropt == 5:
                    f_sfr.write(fmts[8].format(uhc) + " ")
        elif i > 0 and self.isfropt == 0:
            f_sfr.write(
                " ".join(fmts[1:4]).format(thickm, elevupdn, width) + " "
            )
    elif self.isfropt in [0, 4, 5] and icalc >= 2:
        # eight-point cross section or tabular flow
        f_sfr.write(fmts[0].format(hcond) + " ")
        if self.isfropt in [4, 5] and i > 0 and icalc == 2:
            pass
        else:
            f_sfr.write(" ".join(fmts[1:3]).format(thickm, elevupdn) + " ")
            if self.isfropt in [4, 5] and icalc == 2 and i == 0:
                # NOTE(review): fmts[3:6] are the format strings for
                # width/depth/thts but the values written are
                # thts/thti/eps (cf. fmts[5:8] in the icalc == 1 branch)
                # -- confirm the intended format indices
                f_sfr.write(
                    " ".join(fmts[3:6]).format(thts, thti, eps) + " "
                )
                if self.isfropt == 5:
                    f_sfr.write(fmts[8].format(uhc) + " ")
            else:
                pass
    elif self.isfropt == 1 and icalc <= 1:
        f_sfr.write(fmts[3].format(width) + " ")
        if icalc <= 0:
            f_sfr.write(fmts[4].format(depth) + " ")
    elif self.isfropt in [2, 3]:
        if icalc <= 0:
            f_sfr.write(fmts[3].format(width) + " ")
            f_sfr.write(fmts[4].format(depth) + " ")
        elif icalc == 1:
            # width only, and only in the first stress period
            if i > 0:
                pass
            else:
                f_sfr.write(fmts[3].format(width) + " ")
        else:
            pass
    else:
        # nothing to write for this isfropt/icalc combination
        return
    f_sfr.write("\n")
def write_file(self, filename=None):
    """
    Write the package file.

    Parameters
    ----------
    filename : str, optional
        Path of the file to write; defaults to the package's fn_path.

    Returns
    -------
    None
    """
    # tabfiles = False
    # tabfiles_dict = {}
    # transroute = False
    # reachinput = False
    if filename is not None:
        self.fn_path = filename
    f_sfr = open(self.fn_path, "w")
    # Item 0 -- header
    f_sfr.write("{0}\n".format(self.heading))
    # Item 1: options (written as an OPTIONS block only for MODFLOW-NWT)
    if (
        isinstance(self.options, OptionBlock)
        and self.parent.version == "mfnwt"
    ):
        self.options.update_from_package(self)
        self.options.write_options(f_sfr)
    elif isinstance(self.options, OptionBlock):
        # non-NWT versions get the options written inline, not as a block
        self.options.update_from_package(self)
        self.options.block = False
        self.options.write_options(f_sfr)
    else:
        pass
    # item 1c: dimensioning and solution options
    self._write_1c(f_sfr)
    # item 2
    self._write_reach_data(f_sfr)
    # items 3 and 4 are skipped (parameters not supported)
    for i in range(0, self.nper):
        # item 5
        itmp = self.dataset_5[i][0]
        f_sfr.write(" ".join(map(str, self.dataset_5[i])) + "\n")
        if itmp > 0:
            # Item 6
            for j in range(itmp):
                # write datasets 6a, 6b and 6c
                self._write_segment_data(i, j, f_sfr)
                icalc = self.segment_data[i].icalc[j]
                nseg = self.segment_data[i].nseg[j]
                if icalc == 2:
                    # dataset 6d: eight-point channel cross section
                    # or isfropt <= 1:
                    if (
                        i == 0
                        or self.nstrm > 0
                        and not self.reachinput
                        or self.isfropt <= 1
                    ):
                        for k in range(2):
                            for d in self.channel_geometry_data[i][nseg][
                                k
                            ]:
                                f_sfr.write("{:.2f} ".format(d))
                            f_sfr.write("\n")
                if icalc == 4:
                    # dataset 6e: flow/depth/width table
                    # nstrpts = self.segment_data[i][j][5]
                    for k in range(3):
                        for d in self.channel_flow_data[i][nseg][k]:
                            f_sfr.write("{:.2f} ".format(d))
                        f_sfr.write("\n")
            if self.tabfiles and i == 0:
                # tabfiles are written for the first stress period only
                for j in sorted(self.tabfiles_dict.keys()):
                    f_sfr.write(
                        "{:.0f} {:.0f} {:.0f}\n".format(
                            j,
                            self.tabfiles_dict[j]["numval"],
                            self.tabfiles_dict[j]["inuit"],
                        )
                    )
        else:
            continue
    f_sfr.close()
def export(self, f, **kwargs):
    """
    Export the SFR package. If *f* is a shapefile path (.shp), write
    one polygon per reach carrying the reach_data attributes;
    otherwise defer to the generic flopy export utilities.
    """
    if isinstance(f, str) and f.lower().endswith(".shp"):
        from flopy.utils.geometry import Polygon
        from flopy.export.shapefile_utils import recarray2shp

        # build one cell polygon per reach from the model grid
        geoms = []
        for idx, row in enumerate(self.reach_data.i):
            verts = self.parent.modelgrid.get_cell_vertices(
                row, self.reach_data.j[idx]
            )
            geoms.append(Polygon(verts))
        recarray2shp(self.reach_data, geoms, shpname=f, **kwargs)
    else:
        from flopy import export

        return export.utils.package_export(f, self, **kwargs)
def export_linkages(self, f, **kwargs):
    """
    Export linework shapefile showing all routing connections between
    SFR reaches. A length field containing the distance between connected
    reaches can be used to filter for the longest connections in a GIS.
    """
    from flopy.utils.geometry import LineString
    from flopy.export.shapefile_utils import recarray2shp

    rd = self.reach_data.copy()
    rd.sort(order=["reachID"])
    # cell-center coordinates keyed by reachID
    grid = self.parent.modelgrid
    xc = grid.xcellcenters[rd.i, rd.j]
    yc = grid.ycellcenters[rd.i, rd.j]
    centers = dict(zip(rd.reachID, zip(xc, yc)))
    # draw a line from each reach center to its outreach center
    geoms = []
    lengths = []
    for reach_id in rd.reachID:
        x0, y0 = centers[reach_id]
        downstream = rd.outreach[reach_id - 1]
        # outlets (outreach == 0) get a zero-length line to themselves
        x1, y1 = (x0, y0) if downstream == 0 else centers[downstream]
        geoms.append(LineString([(x0, y0), (x1, y1)]))
        lengths.append(np.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2))
    # append connection lengths for filtering in GIS
    rd = recfunctions.append_fields(
        rd,
        names=["length"],
        data=[np.array(lengths)],
        usemask=False,
        asrecarray=True,
    )
    recarray2shp(rd, geoms, f, **kwargs)
def export_outlets(self, f, **kwargs):
    """
    Export point shapefile showing locations where streamflow is leaving
    the model (outreach=0).
    """
    from flopy.utils.geometry import Point
    from flopy.export.shapefile_utils import recarray2shp

    # all-equal outreach values mean outreaches were never computed
    if np.min(self.reach_data.outreach) == np.max(self.reach_data.outreach):
        self.set_outreaches()
    outlets = self.reach_data[self.reach_data.outreach == 0].copy()
    outlets.sort(order=["iseg", "ireach"])
    # cell centers of the outlet reaches
    grid = self.parent.modelgrid
    xc = grid.xcellcenters[outlets.i, outlets.j]
    yc = grid.ycellcenters[outlets.i, outlets.j]
    geoms = [Point(x, y) for x, y in zip(xc, yc)]
    recarray2shp(outlets, geoms, f, **kwargs)
def export_transient_variable(self, f, varname, **kwargs):
    """
    Export point shapefile showing locations with a given segment_data
    variable applied. For example, segments where streamflow is entering
    or leaving the upstream end of a stream segment (FLOW) or where RUNOFF
    is applied. Cell centroids of the first reach of segments with non-zero
    terms of varname are exported; values of varname are exported by stress
    period in the attribute fields (e.g. flow0, flow1, flow2... for FLOW
    in stress periods 0, 1, 2...

    Parameters
    ----------
    f : str, filename
    varname : str
        Variable in SFR Package dataset 6a (see SFR package documentation)
    """
    from flopy.utils.geometry import Point
    from flopy.export.shapefile_utils import recarray2shp

    # all-equal outreach values mean outreaches were never computed
    if np.min(self.reach_data.outreach) == np.max(self.reach_data.outreach):
        self.set_outreaches()
    table = self.get_variable_by_stress_period(varname.lower())
    # cell centers of the first reach of each flagged segment
    grid = self.parent.modelgrid
    xc = grid.xcellcenters[table.i, table.j]
    yc = grid.ycellcenters[table.i, table.j]
    geoms = [Point(x, y) for x, y in zip(xc, yc)]
    recarray2shp(table, geoms, f, **kwargs)
@staticmethod
def ftype():
    """Return the MODFLOW file-type identifier for the SFR package."""
    return "SFR"
@staticmethod
def defaultunit():
    """Return the default unit number for the SFR package."""
    return 17
class check:
"""
Check SFR2 package for common errors
Parameters
----------
sfrpackage : object
Instance of Flopy ModflowSfr2 class.
verbose : bool
Boolean flag used to determine if check method results are
written to the screen
level : int
Check method analysis level. If level=0, summary checks are
performed. If level=1, full checks are performed.
Notes
-----
Daniel Feinstein's top 10 SFR problems (7/16/2014):
1) cell gaps btw adjacent reaches in a single segment
2) cell gaps btw routed segments. possibly because of re-entry problems at domain edge
3) adjacent reaches with STOP sloping the wrong way
4) routed segments with end/start sloping the wrong way
5) STOP>TOP1 violations, i.e.,floaters
6) STOP<<TOP1 violations, i.e., exaggerated incisions
7) segments that end within one diagonal cell distance from another segment, inviting linkage
8) circular routing of segments
9) multiple reaches with non-zero conductance in a single cell
10) reaches in inactive cells
Also after running the model they will want to check for backwater effects.
"""
def __init__(self, sfrpackage, verbose=True, level=1):
    """
    Parameters
    ----------
    sfrpackage : object
        Instance of Flopy ModflowSfr2 class to check.
    verbose : bool
        If True, check results are printed to the screen.
    level : int
        0 for summary checks only; 1 for full checks.
    """
    self.sfr = copy.copy(sfrpackage)
    # prefer the newer modelgrid interface; older models only have .sr
    try:
        self.mg = self.sfr.parent.modelgrid
        self.sr = self.sfr.parent.modelgrid.sr
    except AttributeError:
        self.sr = self.sfr.parent.sr
        self.mg = None
    self.reach_data = sfrpackage.reach_data
    self.segment_data = sfrpackage.segment_data
    self.verbose = verbose
    self.level = level
    # names of checks that passed / warned / errored
    self.passed = []
    self.warnings = []
    self.errors = []
    # running text report of all check results
    self.txt = "\n{} ERRORS:\n".format(self.sfr.name[0])
    self.summary_array = None
def _boolean_compare(
    self,
    array,
    col1,
    col2,
    level0txt="{} violations encountered.",
    level1txt="Violations:",
    sort_ascending=True,
    print_delimiter=" ",
):
    """
    Compare two columns in a record array. For each row,
    tests if value in col1 is greater than col2. If any values
    in col1 are > col2, subsets array to only include rows where
    col1 is greater. Creates another column with differences
    (col1-col2), and prints the array sorted by the differences
    column (diff).

    Parameters
    ----------
    array : record array
        Array with columns to compare.
    col1 : string, ndarray, or (name, data) tuple
        Column name in array, or data to append as a new column.
    col2 : string, ndarray, or (name, data) tuple
        Column name in array, or data to append as a new column.
    sort_ascending : T/F; default True
        If True, printed array will be sorted by differences in
        ascending order.
    print_delimiter : str
        Delimiter for printed array.

    Returns
    -------
    txt : str
        Error messages and printed array (if .level attribute of
        checker is set to 1). Returns an empty string if no
        values in col1 are greater than col2.

    Notes
    -----
    info about appending to record arrays (views vs. copies and upcoming
    changes to numpy):
    http://stackoverflow.com/questions/22865877/how-do-i-write-to-multiple-fields-of-a-structured-array
    """
    txt = ""
    array = array.view(np.recarray).copy()
    # raw arrays passed for col1/col2 are appended under temporary names
    if isinstance(col1, np.ndarray):
        array = recfunctions.append_fields(
            array, names="tmp1", data=col1, asrecarray=True
        )
        col1 = "tmp1"
    if isinstance(col2, np.ndarray):
        array = recfunctions.append_fields(
            array, names="tmp2", data=col2, asrecarray=True
        )
        col2 = "tmp2"
    # (name, data) tuples are appended under the supplied name
    if isinstance(col1, tuple):
        array = recfunctions.append_fields(
            array, names=col1[0], data=col1[1], asrecarray=True
        )
        col1 = col1[0]
    if isinstance(col2, tuple):
        array = recfunctions.append_fields(
            array, names=col2[0], data=col2[1], asrecarray=True
        )
        col2 = col2[0]
    failed = array[col1] > array[col2]
    if np.any(failed):
        failed_info = np.array(array)[failed]
        txt += level0txt.format(len(failed_info)) + "\n"
        if self.level == 1:
            # difference column for sorting the printed violations
            diff = failed_info[col2] - failed_info[col1]
            # only print columns that carry information
            cols = [
                c
                for c in failed_info.dtype.names
                if failed_info[c].sum() != 0
                and c != "diff"
                and "tmp" not in c
            ]
            failed_info = recfunctions.append_fields(
                failed_info[cols].copy(),
                names="diff",
                data=diff,
                usemask=False,
                asrecarray=False,
            )
            failed_info.sort(order="diff", axis=0)
            if not sort_ascending:
                failed_info = failed_info[::-1]
            txt += level1txt + "\n"
            txt += _print_rec_array(failed_info, delimiter=print_delimiter)
        txt += "\n"
    return txt
def _txt_footer(
    self, headertxt, txt, testname, passed=False, warning=True
):
    """
    Record the outcome of one check and append it to the report.

    Registers *testname* under passed / warnings / errors, optionally
    prints the message, and appends header + message to self.txt.
    """
    if passed or not txt:
        txt += "passed."
        self.passed.append(testname)
    elif warning:
        self.warnings.append(testname)
    else:
        self.errors.append(testname)
    if self.verbose:
        print(txt + "\n")
    self.txt += headertxt + txt + "\n"
def for_nans(self):
    """
    Check for nans in reach or segment data
    """
    headertxt = "Checking for nan values...\n"
    txt = ""
    passed = False
    # a reach fails if any of its fields is nan
    isnan = np.any(np.isnan(np.array(self.reach_data.tolist())), axis=1)
    nanreaches = self.reach_data[isnan]
    if np.any(isnan):
        txt += "Found {} reaches with nans:\n".format(len(nanreaches))
        if self.level == 1:
            txt += _print_rec_array(nanreaches, delimiter=" ")
    for per, sd in self.segment_data.items():
        isnan = np.any(np.isnan(np.array(sd.tolist())), axis=1)
        nansd = sd[isnan]
        if np.any(isnan):
            # report the number of segments with nans
            # (previously reported the reach count here by mistake)
            txt += "Per {}: found {} segments with nans:\n".format(
                per, len(nansd)
            )
            if self.level == 1:
                txt += _print_rec_array(nansd, delimiter=" ")
    if len(txt) == 0:
        passed = True
    self._txt_footer(headertxt, txt, "nan values", passed)
def run_all(self):
    """Run the full suite of SFR checks via ModflowSfr2.check()."""
    return self.sfr.check()
def numbering(self):
    """
    Checks for continuity in segment and reach numbering, and for
    segment numbers that decrease in the downstream direction.
    """
    headertxt = (
        "Checking for continuity in segment and reach numbering...\n"
    )
    if self.verbose:
        print(headertxt.strip())
    txt = ""
    passed = False
    sd = self.segment_data[0]
    # check segment numbering
    txt += _check_numbers(
        self.sfr.nss, sd["nseg"], level=self.level, datatype="segment"
    )
    # check reach numbering within each segment
    for segment in np.arange(1, self.sfr.nss + 1):
        reaches = self.reach_data.ireach[self.reach_data.iseg == segment]
        t = _check_numbers(
            len(reaches), reaches, level=self.level, datatype="reach"
        )
        if len(t) > 0:
            txt += "Segment {} has {}".format(segment, t)
    if txt == "":
        passed = True
    self._txt_footer(
        headertxt,
        txt,
        "continuity in segment and reach numbering",
        passed,
        warning=False,
    )
    headertxt = "Checking for increasing segment numbers in downstream direction...\n"
    txt = ""
    passed = False
    if self.verbose:
        print(headertxt.strip())
    # for per, segment_data in self.segment_data.items():
    inds = (sd.outseg < sd.nseg) & (sd.outseg > 0)
    if len(txt) == 0 and np.any(inds):
        decreases = np.array(sd[inds])[["nseg", "outseg"]]
        txt += "Found {} segment numbers decreasing in the downstream direction.\n".format(
            len(decreases)
        )
        txt += "MODFLOW will run but convergence may be slowed:\n"
        if self.level == 1:
            txt += "nseg outseg\n"
            t = ""
            for nseg, outseg in decreases:
                t += "{} {}\n".format(nseg, outseg)
            txt += t  # '\n'.join(textwrap.wrap(t, width=10))
    # previously this tested the stale reach-numbering string ``t``,
    # which could mark the check as passed even when decreasing
    # segment numbers were found; test the report text instead
    if len(txt) == 0:
        passed = True
    self._txt_footer(headertxt, txt, "segment numbering order", passed)
def routing(self):
"""
Checks for breaks in routing and does comprehensive check for
circular routing
"""
headertxt = "Checking for circular routing...\n"
txt = ""
if self.verbose:
print(headertxt.strip())
# txt += self.sfr.get_outlets(level=self.level, verbose=False) # will print twice if verbose=True
# simpler check method using paths from routing graph
circular_segs = [k for k, v in self.sfr.paths.items() if v is None]
if len(circular_segs) > 0:
txt += "{0} instances where an outlet was not found after {1} consecutive segments!\n".format(
len(circular_segs), self.sfr.nss
)
if self.level == 1:
txt += " ".join(map(str, circular_segs)) + "\n"
else:
f = os.path.join(
self.sfr.parent._model_ws, "circular_routing.chk.csv"
)
np.savetxt(
f, circular_segs, fmt="%d", delimiter=",", header=txt
)
txt += "See {} for details.".format(f)
if self.verbose:
print(txt)
self._txt_footer(headertxt, txt, "circular routing", warning=False)
# check reach connections for proximity
if self.mg is not None or self.mg is not None:
rd = self.sfr.reach_data.copy()
rd.sort(order=["reachID"])
try:
xcentergrid, ycentergrid, zc = self.mg.get_cellcenters()
del zc
except AttributeError:
xcentergrid = self.mg.xcellcenters
ycentergrid = self.mg.ycellcenters
x0 = xcentergrid[rd.i, rd.j]
y0 = ycentergrid[rd.i, rd.j]
loc = dict(zip(rd.reachID, zip(x0, y0)))
# compute distances between node centers of connected reaches
headertxt = "Checking reach connections for proximity...\n"
txt = ""
if self.verbose:
print(headertxt.strip())
dist = []
for r in rd.reachID:
x0, y0 = loc[r]
outreach = rd.outreach[r - 1]
if outreach == 0:
dist.append(0)
else:
x1, y1 = loc[outreach]
dist.append(np.sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2))
dist = np.array(dist)
# compute max width of reach nodes (hypotenuse for rectangular nodes)
delr = self.mg.delr
delc = self.mg.delc
dx = delr[rd.j] # (delr * self.sr.length_multiplier)[rd.j]
dy = delc[rd.i] # (delc * self.sr.length_multiplier)[rd.i]
hyp = np.sqrt(dx ** 2 + dy ** 2)
# breaks are when the connection distance is greater than
# max node with * a tolerance
# 1.25 * hyp is greater than distance of two diagonally adjacent nodes
# where one is 1.5x larger than the other
breaks = np.where(dist > hyp * 1.25)
breaks_reach_data = rd[breaks]
segments_with_breaks = set(breaks_reach_data.iseg)
if len(breaks) > 0:
txt += (
"{0} segments ".format(len(segments_with_breaks))
+ "with non-adjacent reaches found.\n"
)
if self.level == 1:
txt += "At segments:\n"
txt += " ".join(map(str, segments_with_breaks)) + "\n"
else:
f = os.path.join(
self.sfr.parent._model_ws,
"reach_connection_gaps.chk.csv",
)
rd.tofile(f, sep="\t")
txt += "See {} for details.".format(f)
if self.verbose:
print(txt)
self._txt_footer(
headertxt, txt, "reach connections", warning=False
)
else:
txt += (
"No DIS package or SpatialReference object; cannot "
+ "check reach proximities."
)
self._txt_footer(headertxt, txt, "")
    def overlapping_conductance(self, tol=1e-6):
        """
        Checks for multiple SFR reaches in one cell; and whether more than
        one reach has Cond > 0

        Parameters
        ----------
        tol : float
            A cell is flagged when, among its collocated reaches, the ratio
            of smallest to largest conductance exceeds this value
            (default 1e-6).
        """
        headertxt = (
            "Checking for model cells with multiple non-zero "
            + "SFR conductances...\n"
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        # make nreach vectors of each conductance parameter
        reach_data = np.array(self.reach_data)
        # if no dis file was supplied, can't compute node numbers
        # make nodes based on unique row, col pairs
        # if np.diff(reach_data.node).max() == 0:
        # always use unique rc, since flopy assigns nodes by k, i, j
        uniquerc = {}
        for i, (r, c) in enumerate(reach_data[["i", "j"]]):
            if (r, c) not in uniquerc:
                uniquerc[(r, c)] = i + 1
        reach_data["node"] = [
            uniquerc[(r, c)] for r, c in reach_data[["i", "j"]]
        ]
        # reach-level streambed hydraulic conductivity; all zeros means it
        # was never populated, so fall back to segment-interpolated values
        K = reach_data["strhc1"]
        if K.max() == 0:
            K = self.sfr._interpolate_to_reaches("hcond1", "hcond2")
        # streambed thickness, same fallback
        b = reach_data["strthick"]
        if b.max() == 0:
            b = self.sfr._interpolate_to_reaches("thickm1", "thickm2")
        L = reach_data["rchlen"]
        w = self.sfr._interpolate_to_reaches("width1", "width2")
        # Calculate SFR conductance for each reach
        # (binv guards against division by zero where thickness is 0)
        binv = np.zeros(b.shape, dtype=b.dtype)
        idx = b > 0.0
        binv[idx] = 1.0 / b[idx]
        Cond = K * w * L * binv
        shared_cells = _get_duplicates(reach_data["node"])
        nodes_with_multiple_conductance = set()
        for node in shared_cells:
            # select the collocated reaches for this cell
            conductances = Cond[reach_data["node"] == node].copy()
            conductances.sort()
            # list nodes with multiple non-zero SFR reach conductances
            if conductances[-1] != 0.0 and (
                conductances[0] / conductances[-1] > tol
            ):
                nodes_with_multiple_conductance.update({node})
        if len(nodes_with_multiple_conductance) > 0:
            txt += (
                "{} model cells with multiple non-zero SFR conductances found.\n"
                "This may lead to circular routing between collocated reaches.\n".format(
                    len(nodes_with_multiple_conductance)
                )
            )
            if self.level == 1:
                txt += "Nodes with overlapping conductances:\n"
                # report the values actually used in the calculation
                reach_data["strthick"] = b
                reach_data["strhc1"] = K
                cols = [
                    c
                    for c in reach_data.dtype.names
                    if c
                    in [
                        "k",
                        "i",
                        "j",
                        "iseg",
                        "ireach",
                        "rchlen",
                        "strthick",
                        "strhc1",
                        "width",
                        "conductance",
                    ]
                ]
                reach_data = recfunctions.append_fields(
                    reach_data,
                    names=["width", "conductance"],
                    data=[w, Cond],
                    usemask=False,
                    asrecarray=False,
                )
                has_multiple = np.array(
                    [
                        True if n in nodes_with_multiple_conductance else False
                        for n in reach_data["node"]
                    ]
                )
                reach_data = reach_data[has_multiple]
                reach_data = reach_data[cols]
                txt += _print_rec_array(reach_data, delimiter="\t")
        self._txt_footer(headertxt, txt, "overlapping conductance")
    def elevations(self, min_strtop=-10, max_strtop=15000):
        """
        Checks streambed elevations for downstream rises and inconsistencies
        with model grid

        Parameters
        ----------
        min_strtop : numeric
            Lowest plausible streambed top elevation (default -10).
        max_strtop : numeric
            Highest plausible streambed top elevation (default 15000).
        """
        # --- check 1: streambed tops below min_strtop ---
        headertxt = (
            "Checking for streambed tops of less "
            + "than {}...\n".format(min_strtop)
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        passed = False
        if self.sfr.isfropt in [1, 2, 3]:
            # strtop is only read as reach data for isfropt 1, 2 or 3;
            # all-identical values indicate it was never populated
            if np.diff(self.reach_data.strtop).max() == 0:
                txt += "isfropt setting of 1,2 or 3 requires strtop information!\n"
            else:
                is_less = self.reach_data.strtop < min_strtop
                if np.any(is_less):
                    below_minimum = self.reach_data[is_less]
                    txt += "{} instances of streambed top below minimum found.\n".format(
                        len(below_minimum)
                    )
                    if self.level == 1:
                        txt += "Reaches with low strtop:\n"
                        txt += _print_rec_array(below_minimum, delimiter="\t")
            if len(txt) == 0:
                passed = True
        else:
            # check is not applicable for this isfropt
            txt += "strtop not specified for isfropt={}\n".format(
                self.sfr.isfropt
            )
            passed = True
        self._txt_footer(headertxt, txt, "minimum streambed top", passed)
        # --- check 2: streambed tops above max_strtop ---
        headertxt = (
            "Checking for streambed tops of "
            + "greater than {}...\n".format(max_strtop)
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        passed = False
        if self.sfr.isfropt in [1, 2, 3]:
            if np.diff(self.reach_data.strtop).max() == 0:
                txt += (
                    "isfropt setting of 1,2 or 3 "
                    + "requires strtop information!\n"
                )
            else:
                is_greater = self.reach_data.strtop > max_strtop
                if np.any(is_greater):
                    above_max = self.reach_data[is_greater]
                    txt += (
                        "{} instances ".format(len(above_max))
                        + "of streambed top above the maximum found.\n"
                    )
                    if self.level == 1:
                        txt += "Reaches with high strtop:\n"
                        txt += _print_rec_array(above_max, delimiter="\t")
            if len(txt) == 0:
                passed = True
        else:
            txt += "strtop not specified for isfropt={}\n".format(
                self.sfr.isfropt
            )
            passed = True
        self._txt_footer(headertxt, txt, "maximum streambed top", passed)
        # --- check 3: downstream rises in segment end elevations ---
        headertxt = (
            "Checking segment_data for "
            + "downstream rises in streambed elevation...\n"
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        # decide whether to check elevup and elevdn from items 6b/c
        # (see online guide to SFR input; Data Set 6b description)
        passed = False
        if self.sfr.isfropt in [0, 4, 5]:
            pers = sorted(self.segment_data.keys())
            for per in pers:
                # drop placeholder rows (elevup <= -999999)
                segment_data = self.segment_data[per][
                    self.segment_data[per].elevup > -999999
                ]
                # enforce consecutive increasing segment numbers (for indexing)
                segment_data.sort(order="nseg")
                t = _check_numbers(
                    len(segment_data),
                    segment_data.nseg,
                    level=1,
                    datatype="Segment",
                )
                if len(t) > 0:
                    txt += (
                        "Elevation check requires "
                        + "consecutive segment numbering."
                    )
                    self._txt_footer(headertxt, txt, "")
                    return
                # first check for segments where elevdn > elevup
                d_elev = segment_data.elevdn - segment_data.elevup
                segment_data = recfunctions.append_fields(
                    segment_data, names="d_elev", data=d_elev, asrecarray=True
                )
                txt += self._boolean_compare(
                    np.array(segment_data)[
                        ["nseg", "outseg", "elevup", "elevdn", "d_elev"]
                    ],
                    col1="d_elev",
                    col2=np.zeros(len(segment_data)),
                    level0txt="Stress Period {}: ".format(per + 1)
                    + "{} segments encountered with elevdn > elevup.",
                    level1txt="Backwards segments:",
                )
                # next check for rises between segments
                non_outlets = segment_data.outseg > 0
                non_outlets_seg_data = segment_data[
                    non_outlets
                ]  # lake outsegs are < 0
                # elevup of each non-outlet segment's downstream segment
                # (relies on the consecutive numbering enforced above)
                outseg_elevup = np.array(
                    [
                        segment_data.elevup[o - 1]
                        for o in segment_data.outseg
                        if o > 0
                    ]
                )
                d_elev2 = outseg_elevup - segment_data.elevdn[non_outlets]
                non_outlets_seg_data = recfunctions.append_fields(
                    non_outlets_seg_data,
                    names=["outseg_elevup", "d_elev2"],
                    data=[outseg_elevup, d_elev2],
                    usemask=False,
                    asrecarray=False,
                )
                txt += self._boolean_compare(
                    non_outlets_seg_data[
                        [
                            "nseg",
                            "outseg",
                            "elevdn",
                            "outseg_elevup",
                            "d_elev2",
                        ]
                    ],
                    col1="d_elev2",
                    col2=np.zeros(len(non_outlets_seg_data)),
                    level0txt="Stress Period {}: ".format(per + 1)
                    + "{} segments encountered with segments encountered "
                    "with outseg elevup > elevdn.",
                    level1txt="Backwards segment connections:",
                )
            if len(txt) == 0:
                passed = True
        else:
            txt += (
                "Segment elevup and elevdn not "
                + "specified for nstrm="
                + "{} and isfropt={}\n".format(
                    self.sfr.nstrm, self.sfr.isfropt
                )
            )
            passed = True
        self._txt_footer(headertxt, txt, "segment elevations", passed)
        # --- check 4: downstream rises in reach strtop ---
        headertxt = (
            "Checking reach_data for "
            + "downstream rises in streambed elevation...\n"
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        passed = False
        if (
            self.sfr.nstrm < 0
            or self.sfr.reachinput
            and self.sfr.isfropt in [1, 2, 3]
        ):  # see SFR input instructions
            # compute outreaches if they aren't there already
            if np.diff(self.sfr.reach_data.outreach).max() == 0:
                self.sfr.set_outreaches()
            # compute changes in elevation
            rd = self.reach_data.copy()
            elev = dict(zip(rd.reachID, rd.strtop))
            # downstream strtop per reach; -9999 marks outlets (outreach 0)
            dnelev = {
                rid: elev[rd.outreach[i]] if rd.outreach[i] != 0 else -9999
                for i, rid in enumerate(rd.reachID)
            }
            strtopdn = np.array([dnelev[r] for r in rd.reachID])
            # outlets get a small negative diff so they never get flagged
            diffs = np.array(
                [
                    (dnelev[i] - elev[i]) if dnelev[i] != -9999 else -0.001
                    for i in rd.reachID
                ]
            )
            reach_data = (
                self.sfr.reach_data
            )  # inconsistent with other checks that work with
            # reach_data attribute of check class. Want to have get_outreaches as a method of sfr class
            # (for other uses). Not sure if other check methods should also copy reach_data directly from
            # SFR package instance for consistency.
            # use outreach values to get downstream elevations
            # non_outlets = reach_data[reach_data.outreach != 0]
            # outreach_elevdn = np.array([reach_data.strtop[o - 1] for o in reach_data.outreach])
            # d_strtop = outreach_elevdn[reach_data.outreach != 0] - non_outlets.strtop
            rd = recfunctions.append_fields(
                rd,
                names=["strtopdn", "d_strtop"],
                data=[strtopdn, diffs],
                usemask=False,
                asrecarray=False,
            )
            txt += self._boolean_compare(
                rd[
                    [
                        "k",
                        "i",
                        "j",
                        "iseg",
                        "ireach",
                        "strtop",
                        "strtopdn",
                        "d_strtop",
                        "reachID",
                    ]
                ],
                col1="d_strtop",
                col2=np.zeros(len(rd)),
                level0txt="{} reaches encountered with strtop < strtop of downstream reach.",
                level1txt="Elevation rises:",
            )
            if len(txt) == 0:
                passed = True
        else:
            txt += "Reach strtop not specified for nstrm={}, reachinput={} and isfropt={}\n".format(
                self.sfr.nstrm, self.sfr.reachinput, self.sfr.isfropt
            )
            passed = True
        self._txt_footer(headertxt, txt, "reach elevations", passed)
        # --- check 5: reach elevations vs. model grid ---
        headertxt = "Checking reach_data for inconsistencies between streambed elevations and the model grid...\n"
        if self.verbose:
            print(headertxt.strip())
        txt = ""
        if self.sfr.parent.dis is None:
            txt += "No DIS file supplied; cannot check SFR elevations against model grid."
            self._txt_footer(headertxt, txt, "")
            return
        passed = False
        warning = True
        if (
            self.sfr.nstrm < 0
            or self.sfr.reachinput
            and self.sfr.isfropt in [1, 2, 3]
        ):  # see SFR input instructions
            reach_data = np.array(self.reach_data)
            i, j, k = reach_data["i"], reach_data["j"], reach_data["k"]
            # check streambed bottoms in relation to respective cell bottoms
            bots = self.sfr.parent.dis.botm.array[k, i, j]
            streambed_bots = reach_data["strtop"] - reach_data["strthick"]
            reach_data = recfunctions.append_fields(
                reach_data,
                names=["layerbot", "strbot"],
                data=[bots, streambed_bots],
                usemask=False,
                asrecarray=False,
            )
            txt += self._boolean_compare(
                reach_data[
                    [
                        "k",
                        "i",
                        "j",
                        "iseg",
                        "ireach",
                        "strtop",
                        "strthick",
                        "strbot",
                        "layerbot",
                        "reachID",
                    ]
                ],
                col1="layerbot",
                col2="strbot",
                level0txt="{} reaches encountered with streambed bottom below layer bottom.",
                level1txt="Layer bottom violations:",
            )
            if len(txt) > 0:
                warning = (
                    False  # this constitutes an error (MODFLOW won't run)
                )
            # check streambed elevations in relation to model top
            tops = self.sfr.parent.dis.top.array[i, j]
            reach_data = recfunctions.append_fields(
                reach_data,
                names="modeltop",
                data=tops,
                usemask=False,
                asrecarray=False,
            )
            txt += self._boolean_compare(
                reach_data[
                    [
                        "k",
                        "i",
                        "j",
                        "iseg",
                        "ireach",
                        "strtop",
                        "modeltop",
                        "strhc1",
                        "reachID",
                    ]
                ],
                col1="strtop",
                col2="modeltop",
                level0txt="{} reaches encountered with streambed above model top.",
                level1txt="Model top violations:",
            )
            if len(txt) == 0:
                passed = True
        else:
            txt += "Reach strtop, strthick not specified for nstrm={}, reachinput={} and isfropt={}\n".format(
                self.sfr.nstrm, self.sfr.reachinput, self.sfr.isfropt
            )
            passed = True
        self._txt_footer(
            headertxt,
            txt,
            "reach elevations vs. grid elevations",
            passed,
            warning=warning,
        )
        # --- check 6: segment end elevations vs. model grid ---
        # In cases where segment end elevations/thicknesses are used,
        # do these need to be checked for consistency with layer bottoms?
        headertxt = (
            "Checking segment_data for inconsistencies "
            + "between segment end elevations and the model grid...\n"
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        passed = False
        if self.sfr.isfropt in [0, 4, 5]:
            reach_data = self.reach_data
            pers = sorted(self.segment_data.keys())
            for per in pers:
                segment_data = self.segment_data[per][
                    self.segment_data[per].elevup > -999999
                ]
                # enforce consecutive increasing segment numbers (for indexing)
                segment_data.sort(order="nseg")
                t = _check_numbers(
                    len(segment_data),
                    segment_data.nseg,
                    level=1,
                    datatype="Segment",
                )
                if len(t) > 0:
                    raise Exception(
                        "Elevation check requires consecutive segment numbering."
                    )
                # compare segment end elevations (items 6b/c) to model top
                # at the first and last reach of each segment
                first_reaches = reach_data[reach_data.ireach == 1].copy()
                last_reaches = reach_data[
                    np.append((np.diff(reach_data.iseg) == 1), True)
                ].copy()
                segment_ends = recfunctions.stack_arrays(
                    [first_reaches, last_reaches], asrecarray=True, usemask=False
                )
                segment_ends["strtop"] = np.append(
                    segment_data["elevup"], segment_data["elevdn"]
                )
                i, j = segment_ends.i, segment_ends.j
                tops = self.sfr.parent.dis.top.array[i, j]
                diff = tops - segment_ends.strtop
                segment_ends = recfunctions.append_fields(
                    segment_ends,
                    names=["modeltop", "diff"],
                    data=[tops, diff],
                    usemask=False,
                    asrecarray=False,
                )
                txt += self._boolean_compare(
                    segment_ends[
                        [
                            "k",
                            "i",
                            "j",
                            "iseg",
                            "strtop",
                            "modeltop",
                            "diff",
                            "reachID",
                        ]
                    ].copy(),
                    col1=np.zeros(len(segment_ends)),
                    col2="diff",
                    level0txt="{} reaches encountered with streambed above model top.",
                    level1txt="Model top violations:",
                )
            if len(txt) == 0:
                passed = True
        else:
            txt += "Segment elevup and elevdn not specified for nstrm={} and isfropt={}\n".format(
                self.sfr.nstrm, self.sfr.isfropt
            )
            passed = True
        self._txt_footer(
            headertxt, txt, "segment elevations vs. model grid", passed
        )
    def slope(self, minimum_slope=1e-4, maximum_slope=1.0):
        """Checks that streambed slopes are greater than or equal to a specified minimum value.
        Low slope values can cause "backup" or unrealistic stream stages with icalc options
        where stage is computed.

        Parameters
        ----------
        minimum_slope : float
            Reaches with slopes below this value are flagged (default 1e-4).
        maximum_slope : float
            Reaches with slopes above this value are flagged (default 1.0).
        """
        headertxt = "Checking for streambed slopes of less than {}...\n".format(
            minimum_slope
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        passed = False
        if self.sfr.isfropt in [1, 2, 3]:
            # slope is only read as reach data for isfropt 1, 2 or 3;
            # all-identical values indicate it was never populated
            if np.diff(self.reach_data.slope).max() == 0:
                txt += (
                    "isfropt setting of 1,2 or 3 requires slope information!\n"
                )
            else:
                is_less = self.reach_data.slope < minimum_slope
                if np.any(is_less):
                    below_minimum = self.reach_data[is_less]
                    txt += "{} instances of streambed slopes below minimum found.\n".format(
                        len(below_minimum)
                    )
                    if self.level == 1:
                        txt += "Reaches with low slopes:\n"
                        txt += _print_rec_array(below_minimum, delimiter="\t")
            if len(txt) == 0:
                passed = True
        else:
            # check is not applicable for this isfropt
            txt += "slope not specified for isfropt={}\n".format(
                self.sfr.isfropt
            )
            passed = True
        self._txt_footer(headertxt, txt, "minimum slope", passed)
        headertxt = "Checking for streambed slopes of greater than {}...\n".format(
            maximum_slope
        )
        txt = ""
        if self.verbose:
            print(headertxt.strip())
        passed = False
        if self.sfr.isfropt in [1, 2, 3]:
            if np.diff(self.reach_data.slope).max() == 0:
                txt += (
                    "isfropt setting of 1,2 or 3 requires slope information!\n"
                )
            else:
                is_greater = self.reach_data.slope > maximum_slope
                if np.any(is_greater):
                    above_max = self.reach_data[is_greater]
                    txt += "{} instances of streambed slopes above maximum found.\n".format(
                        len(above_max)
                    )
                    if self.level == 1:
                        txt += "Reaches with high slopes:\n"
                        txt += _print_rec_array(above_max, delimiter="\t")
            if len(txt) == 0:
                passed = True
        else:
            txt += "slope not specified for isfropt={}\n".format(
                self.sfr.isfropt
            )
            passed = True
        self._txt_footer(headertxt, txt, "maximum slope", passed)
def _check_numbers(n, numbers, level=1, datatype="reach"):
"""
Check that a sequence of numbers is consecutive
(that the sequence is equal to the range from 1 to n+1, where n is
the expected length of the sequence).
Parameters
----------
n : int
Expected length of the sequence (i.e. number of stream segments)
numbers : array
Sequence of numbers (i.e. 'nseg' column from the segment_data array)
level : int
Check method analysis level. If level=0, summary checks are
performed. If level=1, full checks are performed.
datatype : str, optional
Only used for reporting.
"""
txt = ""
num_range = np.arange(1, n + 1)
if not np.array_equal(num_range, numbers):
txt += "Invalid {} numbering\n".format(datatype)
if level == 1:
# consistent dimension for boolean array
non_consecutive = np.append(np.diff(numbers) != 1, False)
gaps = num_range[non_consecutive] + 1
if len(gaps) > 0:
gapstr = " ".join(map(str, gaps))
txt += "Gaps in numbering at positions {}\n".format(gapstr)
return txt
def _isnumeric(s):
try:
float(s)
return True
except:
return False
def _markitzero(recarray, inds):
"""
Subtracts 1 from columns specified in inds argument, to convert from
1 to 0-based indexing
"""
lnames = [n.lower() for n in recarray.dtype.names]
for idx in inds:
if idx in lnames:
recarray[idx] -= 1
def _pop_item(line):
try:
return float(line.pop(0))
except:
return 0.0
def _get_dataset(line, dataset):
    """Overwrite ``dataset`` defaults with numeric tokens parsed from ``line``.

    Tokens containing a decimal point become floats, others ints; parsing
    stops at the first non-numeric token. Values are assigned positionally,
    in place.
    (This could be a bad idea vs. explicitly formatting values for each
    dataset.)

    Parameters
    ----------
    line : str
        Line read from the SFR package input file.
    dataset : list
        Default values; overwritten in place.

    Returns
    -------
    dataset : list
        The (partially) filled dataset.
    """
    for i, s in enumerate(line_parse(line)):
        try:
            n = int(s)
        # bug fix: bare excepts replaced with ValueError -- a failed
        # conversion is the only condition that should be handled here
        except ValueError:
            try:
                n = float(s)
            except ValueError:
                # first non-numeric token ends the dataset
                break
        dataset[i] = n
    return dataset
def _get_duplicates(a):
"""
Returns duplicate values in an array, similar to pandas .duplicated()
method
http://stackoverflow.com/questions/11528078/determining-duplicate-values-in-an-array
"""
s = np.sort(a, axis=None)
equal_to_previous_item = np.append(
s[1:] == s[:-1], False
) # maintain same dimension for boolean array
return np.unique(s[equal_to_previous_item])
def _get_item2_names(nstrm, reachinput, isfropt, structured=False):
"""
Determine which variables should be in item 2, based on model grid type,
reachinput specification, and isfropt.
Returns
-------
names : list of str
List of names (same as variables in SFR Package input instructions) of
columns to assign (upon load) or retain (upon write) in reach_data
array.
Notes
-----
Lowercase is used for all variable names.
"""
names = []
if structured:
names += ["k", "i", "j"]
else:
names += ["node"]
names += ["iseg", "ireach", "rchlen"]
if nstrm < 0 or reachinput:
if isfropt in [1, 2, 3]:
names += ["strtop", "slope", "strthick", "strhc1"]
if isfropt in [2, 3]:
names += ["thts", "thti", "eps"]
if isfropt == 3:
names += ["uhc"]
return names
def _fmt_string_list(array, float_format=default_float_format):
    """Build a list of str.format specifiers, one per field of ``array``.

    Parameters
    ----------
    array : numpy recarray / structured array
        Array whose dtype determines the format of each column.
    float_format : str
        Format specifier used for floating-point fields.

    Returns
    -------
    fmt_list : list of str
        One format entry per (non-void) field, in field order.
    """
    fmt_list = []
    for name in array.dtype.names:
        vtype = array.dtype[name].str[1].lower()
        if vtype == "v":
            # void (padding) fields carry no data
            continue
        if vtype == "i":
            fmt_list.append("{:d}")
        elif vtype == "f":
            fmt_list.append(float_format)
        elif vtype == "o":
            # bug fix: object fields previously set float_format but never
            # appended an entry, leaving fmt_list shorter than the number of
            # columns and misaligning every subsequent field
            float_format = "{!s}"
            fmt_list.append(float_format)
        elif vtype == "s":
            raise ValueError(
                "'str' type found in dtype for {!r}. "
                "This gives unpredictable results when "
                "recarray to file - change to 'object' type".format(name)
            )
        else:
            raise ValueError(
                "unknown dtype for {!r}: {!r}".format(name, vtype)
            )
    return fmt_list
def _fmt_string(array, float_format=default_float_format):
    """Return a single space-delimited format string covering all fields of ``array``."""
    field_formats = _fmt_string_list(array, float_format)
    return " ".join(field_formats)
def _print_rec_array(
    array, cols=None, delimiter=" ", float_format=default_float_format
):
    """
    Print out a numpy record array to string, with column names.

    Parameters
    ----------
    array : numpy record array
        Array to render as text.
    cols : list of strings
        List of columns to print.
    delimiter : string
        Delimiter to use.
    float_format : string
        Format specifier applied to floating-point fields.

    Returns
    -------
    txt : string
        Text string of array.
    """
    txt = ""
    if cols is not None:
        # keep only requested columns, preserving the array's field order
        cols = [c for c in array.dtype.names if c in cols]
    else:
        cols = list(array.dtype.names)
    # drop columns with no data (<= -999999 is used as a null sentinel)
    if np.shape(array)[0] > 1:
        cols = [c for c in cols if array[c].min() > -999999]
    # add _fmt_string call here
    array = np.array(array)[cols]
    fmts = _fmt_string_list(array, float_format=float_format)
    txt += delimiter.join(cols) + "\n"
    txt += "\n".join([delimiter.join(fmts).format(*r) for r in array.tolist()])
    return txt
def _parse_1c(line, reachinput, transroute):
    """
    Parse Data Set 1c for SFR2 package.
    See http://water.usgs.gov/nrp/gwsoftware/modflow2000/MFDOC/index.html?sfr.htm for more info

    Parameters
    ----------
    line : str
        line read from SFR package input file
    reachinput : bool
        whether the REACHINPUT option is active (controls whether isfropt
        and the unsaturated-zone variables are read)
    transroute : bool
        whether the TRANSROUTE option is active (controls whether the
        transient routing variables are read)

    Returns
    -------
    a tuple of the Data Set 1c variables, ending with the list of
    auxiliary variable names
    """
    na = 0  # placeholder for variables that are not read
    # line = _get_dataset(line, [0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 1, 30, 1, 2, 0.75, 0.0001, []])
    # line = line.strip().split()
    line = line_parse(line)
    nstrm = int(line.pop(0))
    nss = int(line.pop(0))
    nsfrpar = int(line.pop(0))
    nparseg = int(line.pop(0))
    const = float(line.pop(0))
    dleak = float(line.pop(0))
    ipakcb = int(line.pop(0))
    istcb2 = int(line.pop(0))
    isfropt, nstrail, isuzn, nsfrsets = na, na, na, na
    if reachinput:
        nstrm = abs(nstrm)  # see explanation for dataset 1c in online guide
        isfropt = int(line.pop(0))
        if isfropt > 1:
            # unsaturated-zone discretization variables
            nstrail = int(line.pop(0))
            isuzn = int(line.pop(0))
            nsfrsets = int(line.pop(0))
    # only reachable when reachinput is False (abs() above otherwise)
    if nstrm < 0:
        isfropt = int(line.pop(0))
        if isfropt > 1:
            nstrail = int(line.pop(0))
            isuzn = int(line.pop(0))
            nsfrsets = int(line.pop(0))
    irtflg, numtim, weight, flwtol = na, na, na, na
    if nstrm < 0 or transroute:
        # transient streamflow routing variables
        irtflg = int(_pop_item(line))
        if irtflg > 0:
            numtim = int(line.pop(0))
            weight = float(line.pop(0))
            flwtol = float(line.pop(0))
    # auxiliary variables (MODFLOW-LGR): each name follows an "aux" keyword
    option = [
        line[i]
        for i in np.arange(1, len(line))
        if "aux" in line[i - 1].lower()
    ]
    return (
        nstrm,
        nss,
        nsfrpar,
        nparseg,
        const,
        dleak,
        ipakcb,
        istcb2,
        isfropt,
        nstrail,
        isuzn,
        nsfrsets,
        irtflg,
        numtim,
        weight,
        flwtol,
        option,
    )
def _parse_6a(line, option):
    """
    Parse Data Set 6a for SFR2 package.
    See http://water.usgs.gov/nrp/gwsoftware/modflow2000/MFDOC/index.html?sfr.htm for more info

    Parameters
    ----------
    line : str
        line read from SFR package input file
    option : list of str
        lower-case auxiliary variable names declared in Data Set 1c

    Returns
    -------
    a tuple of length 17 containing all variables for Data Set 6a,
    ending with the list of auxiliary values found on the line
    """
    # line = line.strip().split()
    line = line_parse(line)
    xyz = []
    # handle any aux variables at end of line
    for s in line:
        if s.lower() in option:
            xyz.append(s.lower())
    na = 0  # placeholder for variables not present for this icalc/iupseg
    nseg = int(_pop_item(line))
    icalc = int(_pop_item(line))
    outseg = int(_pop_item(line))
    iupseg = int(_pop_item(line))
    iprior = na
    nstrpts = na
    # iprior is only read for diversions (iupseg > 0)
    if iupseg > 0:
        iprior = int(_pop_item(line))
    # nstrpts is only read when a flow/depth/width table is used (icalc 4)
    if icalc == 4:
        nstrpts = int(_pop_item(line))
    flow = _pop_item(line)
    runoff = _pop_item(line)
    etsw = _pop_item(line)
    pptsw = _pop_item(line)
    roughch = na
    roughbk = na
    # Manning's n: channel (icalc 1 or 2) and banks (icalc 2 only)
    if icalc in [1, 2]:
        roughch = _pop_item(line)
        if icalc == 2:
            roughbk = _pop_item(line)
    cdpth, fdpth, awdth, bwdth = na, na, na, na
    # power-function channel geometry takes the remaining items (icalc 3)
    if icalc == 3:
        cdpth, fdpth, awdth, bwdth = map(float, line)
    return (
        nseg,
        icalc,
        outseg,
        iupseg,
        iprior,
        nstrpts,
        flow,
        runoff,
        etsw,
        pptsw,
        roughch,
        roughbk,
        cdpth,
        fdpth,
        awdth,
        bwdth,
        xyz,
    )
def _parse_6bc(line, icalc, nstrm, isfropt, reachinput, per=0):
    """
    Parse Data Set 6b (or 6c) for SFR2 package.
    See http://water.usgs.gov/nrp/gwsoftware/modflow2000/MFDOC/index.html?sfr.htm for more info

    Parameters
    ----------
    line : str
        line read from SFR package input file
    icalc : int
        stage-calculation option for the segment
    nstrm : int
        NSTRM from Data Set 1c
        (NOTE(review): accepted but not used in this function)
    isfropt : int
        ISFROPT from Data Set 1c; controls which items are present
    reachinput : bool
        whether the REACHINPUT option is active
        (NOTE(review): accepted but not used in this function)
    per : int
        zero-based stress period; several items are only read in period 0

    Returns
    -------
    a tuple of length 9 containing all variables for Data Set 6b/6c
    """
    # count the leading numeric tokens on the line; parse only those
    nvalues = sum([_isnumeric(s) for s in line_parse(line)])
    line = _get_dataset(line, [0] * nvalues)
    hcond, thickm, elevupdn, width, depth, thts, thti, eps, uhc = [0.0] * 9
    if isfropt in [0, 4, 5] and icalc <= 0:
        hcond = line.pop(0)
        thickm = line.pop(0)
        elevupdn = line.pop(0)
        width = line.pop(0)
        depth = line.pop(0)
    elif isfropt in [0, 4, 5] and icalc == 1:
        hcond = line.pop(0)
        # for isfropt 4/5, geometry is only read in the first stress period
        if isfropt in [4, 5] and per > 0:
            pass
        else:
            thickm = line.pop(0)
            elevupdn = line.pop(0)
            # depth is not read if icalc == 1; see table in online guide
            width = line.pop(0)
            thts = _pop_item(line)
            thti = _pop_item(line)
            eps = _pop_item(line)
            if isfropt == 5 and per == 0:
                uhc = line.pop(0)
    elif isfropt in [0, 4, 5] and icalc >= 2:
        hcond = line.pop(0)
        if isfropt in [4, 5] and per > 0 and icalc == 2:
            pass
        else:
            thickm = line.pop(0)
            elevupdn = line.pop(0)
            if isfropt in [4, 5] and per == 0:
                # table in online guide suggests that the following items should be present in this case
                # but in the example
                thts = _pop_item(line)
                thti = _pop_item(line)
                eps = _pop_item(line)
                if isfropt == 5:
                    uhc = _pop_item(line)
            else:
                pass
    elif isfropt == 1 and icalc <= 1:
        width = line.pop(0)
        if icalc <= 0:
            depth = line.pop(0)
    elif isfropt in [2, 3]:
        if icalc <= 0:
            width = line.pop(0)
            depth = line.pop(0)
        elif icalc == 1:
            # width is only read in the first stress period
            if per > 0:
                pass
            else:
                width = line.pop(0)
        else:
            pass
    else:
        pass
    return hcond, thickm, elevupdn, width, depth, thts, thti, eps, uhc
def find_path(graph, start, end=0):
    """Get a path through the routing network,
    from a segment to an outlet.

    Parameters
    ----------
    graph : dict
        Dictionary of seg : outseg numbers
    start : int
        Starting segment
    end : int
        Ending segment (default 0)

    Returns
    -------
    path : list
        List of segment numbers along routing path.
    """
    # hand a copy to the recursive worker, which mutates its graph argument
    return _find_path(graph.copy(), start, end=end)
def _find_path(graph, start, end=0, path=None):
"""Like find_path, but doesn't copy the routing
dictionary (graph) so that the recursion works.
"""
if path is None:
path = list()
path = path + [start]
if start == end:
return path
if start not in graph:
return None
if not isinstance(graph[start], list):
graph[start] = [graph[start]]
for node in graph[start]:
if node not in path:
newpath = _find_path(graph, node, end, path)
if newpath:
return newpath
return None
|
// NOTE(review): minified Vite/Rollup build artifact. It mounts a Vue component
// that renders runner/commentator rows and two timer displays, and dispatches
// the "rotateAccounts" store action every 10 s (1e4 ms) from an interval set
// in the mounted hook. Regenerate from the original source; do not edit by hand.
import{r as a,c as e,o as t,b as s,d as u,e as l,F as n,i as r,u as o,m as c,f as m,j as i}from"./vendor.801e32df.js";import{_ as d,a as v,b as f,s as b}from"./index.cc0145b4.js";import{_ as p,a as h,b as j,c as y,d as _}from"./Footer.11da7d07.js";const g={id:"sidebar"},x={id:"infos"};i({setup(i){const b=a("1228.8px"),A=a("921.6px"),C=m(),T=e((()=>C.getters.currentEst)),k=e((()=>C.getters.formattedTime)),E=e((()=>C.state.timer.status)),F=e((()=>C.state.runs.currentRunners)),R=e((()=>C.state.runs.currentCommentators));return t((()=>{setInterval((()=>{C.dispatch("rotateAccounts")}),1e4)})),(a,e)=>(s(),u(n,null,[l(d),l(v),l("header",null,[l(p)]),l("main",null,[l("div",g,[l(f,{small:""}),l("div",x,[(s(!0),u(n,null,r(o(F),(a=>(s(),u(h,{key:a.id,label:"Runner",value:a.name,account:a.currentAccount()},null,8,["value","account"])))),128)),(s(!0),u(n,null,r(o(R),((a,e)=>(s(),u(h,{key:e,label:"Commentator",value:a.name,account:a.currentAccount()},null,8,["value","account"])))),128)),l(h,{time:"",label:"EST"},{default:c((()=>[l(_,{value:o(T)},null,8,["value"])])),_:1}),l(h,{time:"",label:"Current Time"},{default:c((()=>[l(_,{value:o(k),status:o(E)},null,8,["value","status"])])),_:1})])]),l(j,{style:{width:b.value,height:A.value}},null,8,["style"])]),l("footer",null,[l(y)])],64))}}).use(b).mount("#root");
|
from datawinners.main.couchdb.utils import all_db_names
from datawinners.main.database import get_db_manager
import logging
from datawinners.search.index_utils import get_elasticsearch_handle
from migration.couch.utils import migrate, mark_as_completed
from mangrove.errors.MangroveException import FormModelDoesNotExistsException
from mangrove.form_model.form_model import get_form_model_by_code
from mangrove.transport.contract.survey_response import SurveyResponse
def get_all_survey_responses(dbm):
    """Yield a SurveyResponse object for every row in the 'surveyresponse' view."""
    view_rows = dbm.load_all_rows_in_view('surveyresponse', reduce=False)
    for view_row in view_rows:
        wrapped_doc = SurveyResponse.__document_class__.wrap(view_row['value'])
        yield SurveyResponse.new_from_doc(dbm=dbm, doc=wrapped_doc)
def delete_submission_indexes(db_name, survey_responses_by_form_model_id):
    """Delete the elasticsearch document for each survey response.

    NOTE(review): despite the plural name, each mapping entry appears to be a
    single form_model_id -> survey_response_id pair -- confirm against callers.
    """
    search_handle = get_elasticsearch_handle(timeout=600)
    # Python 2 codebase: iteritems() iterates without building a list
    for form_model_id, survey_response_id in survey_responses_by_form_model_id.iteritems():
        search_handle.delete(db_name, form_model_id, survey_response_id)
def delete_all_submission_logs(db_name):
    """Delete every survey response in ``db_name`` whose form code no longer
    resolves to a form model, then mark the migration as completed.

    Intended to be run once per database by migrate(); failures are logged
    rather than raised so processing of other databases can continue.
    """
    logger = logging.getLogger(db_name)
    try:
        dbm = get_db_manager(db_name)
        documents_with_invalid_form_code = []
        for survey_response in get_all_survey_responses(dbm):
            form_code = survey_response.form_code
            try:
                get_form_model_by_code(dbm, form_code)
            except FormModelDoesNotExistsException:
                # orphaned submission: its questionnaire was deleted
                documents_with_invalid_form_code.append(survey_response.uuid)
                survey_response.delete()
                continue
        if documents_with_invalid_form_code:
            logger.info('Documents with Invalid form_code: %s' % (str(documents_with_invalid_form_code)))
        mark_as_completed(db_name)
    except Exception as e:
        # best-effort migration: log the full traceback and move on
        # (e.message is Python 2 only)
        logger.exception(e.message)
# Run the cleanup across every database as migration version 10.1.2,
# using 7 worker threads.
migrate(all_db_names(), delete_all_submission_logs, version=(10, 1, 2), threads=7)
|
var ncloud = require('../../../lib/');

(function () {
    // Build a client with your OAuth consumer credentials.
    var client = ncloud.createClient({
        oauth_consumer_key: '%YOUR_CONSUMER_KEY%',
        oauth_consumer_secret: '%YOUR_CONSUMER_SECRET%'
    });

    // Shared callback: print the error if one occurred, otherwise the response.
    function printResult(error, response) {
        if (error) {
            console.log(error);
        } else {
            console.log(response);
        }
    }

    // List the public machine images.
    // response example =>
    // [ { vmImageId: 'SPSW0LINUX000043',
    //     productName: 'centos-5.11-64',
    //     productType: { code: 'LINUX', codeName: 'Linux' },
    //     productDescription: 'CentOS 5.11(64bit)',
    //     infraResourceType: { code: 'SW', codeName: 'Software' },
    //     cpuCount: 0,
    //     memorySize: 0,
    //     baseBlockStorageSize: 0,
    //     platformType: { code: 'LNX64', codeName: 'Linux 64 Bit' },
    //     osInformation: 'CentOS 5.11 (64-bit)',
    //     addBlockStroageSize: 0 },
    //   /** more items **/ ]
    client.compute.findPublicImages(printResult);

    // List the server flavors available for a given image.
    // response example =>
    // [ { vmFlavorId: 'SPSVRSTAND000056',
    //     productName: 'vCPU 1EA, Memory 1GB, Disk 50GB',
    //     productType: { code: 'MICRO', codeName: 'Micro Server' },
    //     productDescription: 'vCPU 1EA, Memory 1GB, Disk 50GB',
    //     infraResourceType: { code: 'SVR', codeName: 'Server' },
    //     cpuCount: 1,
    //     memorySize: 1073741824,
    //     baseBlockStorageSize: 53687091200,
    //     osInformation: '',
    //     diskType: { code: 'NET', codeName: 'Network Storage' },
    //     addBlockStroageSize: 0 },
    //   /** more items **/ ]
    client.compute.findFlavors({ vmImageId: 'SPSW0LINUX000031' }, printResult);
})();
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
  # Core message center component library.
  'target_name': 'message_center',
  'type': '<(component)',
  'dependencies': [
    '../../base/base.gyp:base',
    '../../base/base.gyp:base_i18n',
    '../../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
    '../../skia/skia.gyp:skia',
    '../../url/url.gyp:url_lib',
    '../base/strings/ui_strings.gyp:ui_strings',
    '../gfx/gfx.gyp:gfx',
    '../ui.gyp:ui',
    '../ui.gyp:ui_resources',
  ],
  'defines': [
    'MESSAGE_CENTER_IMPLEMENTATION',
  ],
  'sources': [
    'cocoa/notification_controller.h',
    'cocoa/notification_controller.mm',
    'cocoa/popup_collection.h',
    'cocoa/popup_collection.mm',
    'cocoa/popup_controller.h',
    'cocoa/popup_controller.mm',
    'cocoa/settings_controller.h',
    'cocoa/settings_controller.mm',
    'cocoa/settings_entry_view.h',
    'cocoa/settings_entry_view.mm',
    'cocoa/status_item_view.h',
    'cocoa/status_item_view.mm',
    'cocoa/tray_controller.h',
    'cocoa/tray_controller.mm',
    'cocoa/tray_view_controller.h',
    'cocoa/tray_view_controller.mm',
    'dummy_message_center.cc',
    'message_center.cc',
    'message_center.h',
    'message_center_export.h',
    'notification_delegate.cc',
    'notification_delegate.h',
    'message_center_impl.cc',
    'message_center_impl.h',
    'message_center_observer.h',
    'message_center_style.cc',
    'message_center_style.h',
    'message_center_switches.cc',
    'message_center_switches.h',
    'message_center_tray.cc',
    'message_center_tray.h',
    'message_center_tray_delegate.h',
    'message_center_types.h',
    'message_center_util.cc',
    'message_center_util.h',
    'notification.cc',
    'notification.h',
    'notification_blocker.cc',
    'notification_blocker.h',
    'notification_list.cc',
    'notification_list.h',
    'notification_types.cc',
    'notification_types.h',
    'notifier_settings.cc',
    'notifier_settings.h',
    'views/bounded_label.cc',
    'views/bounded_label.h',
    'views/message_bubble_base.cc',
    'views/message_bubble_base.h',
    'views/message_center_bubble.cc',
    'views/message_center_bubble.h',
    'views/message_center_button_bar.cc',
    'views/message_center_button_bar.h',
    'views/message_center_focus_border.h',
    'views/message_center_focus_border.cc',
    'views/message_center_view.cc',
    'views/message_center_view.h',
    'views/message_popup_collection.cc',
    'views/message_popup_collection.h',
    'views/message_view.cc',
    'views/message_view.h',
    'views/notifier_settings_view.cc',
    'views/notifier_settings_view.h',
    'views/notification_view.cc',
    'views/notification_view.h',
    'views/toast_contents_view.cc',
    'views/toast_contents_view.h',
  ],
  # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
  'msvs_disabled_warnings': [ 4267, ],
  'conditions': [
    # Merged from two previously duplicated 'toolkit_views==1' blocks: the
    # compositor dependency used to live in its own condition further down.
    ['toolkit_views==1', {
      'dependencies': [
        '../compositor/compositor.gyp:compositor',
        '../events/events.gyp:events',
        '../views/views.gyp:views',
      ],
    }, {
      'sources/': [
        ['exclude', 'views/'],
      ],
    }],
    ['use_ash==0', {
      'sources!': [
        'views/message_bubble_base.cc',
        'views/message_bubble_base.h',
        'views/message_center_bubble.cc',
        'views/message_center_bubble.h',
        'views/message_popup_bubble.cc',
        'views/message_popup_bubble.h',
      ],
    }],
    ['OS=="mac"', {
      'dependencies': [
        '../ui.gyp:ui_cocoa_third_party_toolkits',
      ],
      'include_dirs': [
        '../../third_party/GTM',
      ],
    }],
    ['notifications==0', {  # Android and iOS.
      'sources/': [
        # Exclude everything except dummy impl.
        ['exclude', '\\.(cc|mm)$'],
        ['include', '^dummy_message_center\\.cc$'],
        ['include', '^message_center_switches\\.cc$'],
      ],
    }, {  # notifications==1
      'sources!': [ 'dummy_message_center.cc' ],
    }],
  ],
}, # target_name: message_center
{
# Test fakes shared by message-center unit tests (fake message center and
# fake notifier-settings provider).
'target_name': 'message_center_test_support',
'type': 'static_library',
'dependencies': [
'../../base/base.gyp:base',
'../../base/base.gyp:test_support_base',
'../../skia/skia.gyp:skia',
'../gfx/gfx.gyp:gfx',
'../ui.gyp:ui',
'message_center',
],
'sources': [
'fake_message_center.h',
'fake_message_center.cc',
'fake_notifier_settings_provider.h',
'fake_notifier_settings_provider.cc',
],
}, # target_name: message_center_test_support
{
  # Unit-test binary for the message center.
  'target_name': 'message_center_unittests',
  'type': 'executable',
  'dependencies': [
    '../../base/base.gyp:base',
    '../../base/base.gyp:test_support_base',
    '../../chrome/chrome_resources.gyp:packed_resources',
    '../../skia/skia.gyp:skia',
    '../../testing/gtest.gyp:gtest',
    # url_lib was listed twice; the duplicate entry has been removed.
    '../../url/url.gyp:url_lib',
    '../gfx/gfx.gyp:gfx',
    '../ui.gyp:ui',
    '../ui_unittests.gyp:run_ui_unittests',
    '../ui.gyp:ui_resources',
    'message_center',
    'message_center_test_support',
  ],
  'sources': [
    'cocoa/notification_controller_unittest.mm',
    'cocoa/popup_collection_unittest.mm',
    'cocoa/popup_controller_unittest.mm',
    'cocoa/settings_controller_unittest.mm',
    'cocoa/status_item_view_unittest.mm',
    'cocoa/tray_controller_unittest.mm',
    'cocoa/tray_view_controller_unittest.mm',
    'message_center_tray_unittest.cc',
    'message_center_impl_unittest.cc',
    'notification_list_unittest.cc',
    'test/run_all_unittests.cc',
  ],
  'conditions': [
    ['desktop_linux == 1 or chromeos == 1 or OS=="ios"', {
      'dependencies': [
        '../base/strings/ui_strings.gyp:ui_unittest_strings',
      ],
    }],
    ['OS=="mac"', {
      'dependencies': [
        '../ui_unittests.gyp:ui_test_support',
      ],
    }],
    ['toolkit_views==1', {
      'dependencies': [
        # Compositor is needed by message_center_view_unittest.cc
        # and for the fonts used by bounded_label_unittest.cc.
        '../compositor/compositor.gyp:compositor',
        '../views/views.gyp:views',
        '../views/views.gyp:views_test_support',
      ],
      'sources': [
        'views/bounded_label_unittest.cc',
        'views/message_center_view_unittest.cc',
        'views/message_popup_collection_unittest.cc',
        'views/notifier_settings_view_unittest.cc',
      ],
    }],
    ['notifications==0', {  # Android and iOS.
      'sources/': [
        # Exclude everything except main().
        ['exclude', '\\.(cc|mm)$'],
        ['include', '^test/run_all_unittests\\.cc$'],
      ],
    }],
    # See http://crbug.com/162998#c4 for why this is needed.
    ['OS=="linux" and linux_use_tcmalloc==1', {
      'dependencies': [
        '../../base/allocator/allocator.gyp:allocator',
      ],
    }],
  ],
}, # target_name: message_center_unittests
],
}
|
#pragma once
#include <libdariadb/utils/logger.h>
#include <libdariadb/utils/strings.h>
#include <cstdlib>
#include <stdexcept>
#include <string>
#define CODE_POS (dariadb::utils::CodePos(__FILE__, __LINE__, __FUNCTION__))
#define MAKE_EXCEPTION(msg) dariadb::utils::Exception::create_and_log(CODE_POS, msg)
// These are macros (not functions) because they must capture CODE_POS at the
// call site.
#ifdef DEBUG
// Debug builds log the error and abort instead of throwing.  The body is
// wrapped in do { } while(0) so both statements stay under the caller's
// control flow: previously `if (cond) THROW_EXCEPTION(msg);` executed
// std::exit(1) unconditionally because the macro expanded to two statements.
#define THROW_EXCEPTION(...)                                                   \
  do {                                                                         \
    dariadb::utils::Exception::create_and_log(CODE_POS, __VA_ARGS__);          \
    std::exit(1);                                                              \
  } while (0);
#else
#define THROW_EXCEPTION(...)                                                   \
  throw dariadb::utils::Exception::create_and_log(CODE_POS, __VA_ARGS__);
#endif
namespace dariadb {
namespace utils {
// A captured source location (file, line, function), built via CODE_POS.
struct CodePos {
  const char *_file;
  const int _line;
  const char *_func;

  CodePos(const char *file, int line, const char *function)
      : _file(file), _line(line), _func(function) {}

  // Render the location as "<file> line: <line> function: <func>\n".
  std::string toString() const {
    std::string out(_file);
    out += " line: ";
    out += std::to_string(_line);
    out += " function: ";
    out += _func;
    out += "\n";
    return out;
  }

  CodePos &operator=(const CodePos &) = delete;
};
// Exception carried by MAKE_EXCEPTION/THROW_EXCEPTION.  The error is logged
// as fatal (including the code position) at creation time.
class Exception : public std::exception {
public:
  // Build an Exception from the given message parts, logging a fatal record
  // that embeds the call-site position `pos`.
  template <typename... T>
  static Exception create_and_log(const CodePos &pos, T... message) {
    auto expanded_message = utils::strings::args_to_string(message...);
    auto ss = std::string("FATAL ERROR. The Exception will be thrown! ") +
              pos.toString() + " Message: " + expanded_message;
    logger_fatal(ss);
    return Exception(expanded_message);
  }

public:
  // std::exception interface: the expanded message text.
  const char *what() const noexcept override { return _msg.c_str(); }
  // Same message as a std::string (valid for the exception's lifetime).
  const std::string &message() const { return _msg; }

protected:
  Exception() {}
  // Was `const char *&`, which could not bind to pointer rvalues (e.g. a
  // string literal decaying to const char*); a plain pointer accepts every
  // previous caller and more.
  Exception(const char *message) : _msg(std::string(message)) {}
  Exception(const std::string &message) : _msg(message) {}

private:
  std::string _msg;
};
}
}
|
#
# PySNMP MIB module RADLAN-vlanVoice-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RADLAN-vlanVoice-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:51:35 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
VlanIndex, = mibBuilder.importSymbols("Q-BRIDGE-MIB", "VlanIndex")
VlanPriority, = mibBuilder.importSymbols("RADLAN-MIB", "VlanPriority")
vlan, = mibBuilder.importSymbols("RADLAN-vlan-MIB", "vlan")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Bits, iso, NotificationType, IpAddress, Counter32, Gauge32, MibIdentifier, TimeTicks, Unsigned32, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, ObjectIdentity, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "iso", "NotificationType", "IpAddress", "Counter32", "Gauge32", "MibIdentifier", "TimeTicks", "Unsigned32", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "ObjectIdentity", "Integer32")
MacAddress, TruthValue, DisplayString, RowStatus, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "TruthValue", "DisplayString", "RowStatus", "TextualConvention")
# --- Module identity --------------------------------------------------------
# RADLAN voice-VLAN MIB, rooted at the Marvell/RADLAN enterprise subtree
# 1.3.6.1.4.1.89.48.54.  This file is pysmi-generated; mibBuilder is injected
# by the loading pysnmp framework.
vlanVoice = ModuleIdentity((1, 3, 6, 1, 4, 1, 89, 48, 54))
# NOTE(review): both revision stamps are identical although the two revision
# descriptions below differ -- presumably carried over from the source ASN.1;
# confirm against the original MIB.
vlanVoice.setRevisions(('2010-09-26 00:00', '2010-09-26 00:00',))
# setRevisionsDescriptions only exists on newer pysnmp builders (> 4.4.0).
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: vlanVoice.setRevisionsDescriptions(('Editorial changes to support new MIB compilers.', 'Initial version of this MIB.',))
if mibBuilder.loadTexts: vlanVoice.setLastUpdated('201001090000Z')
if mibBuilder.loadTexts: vlanVoice.setOrganization('Marvell Computer Communications Ltd.')
if mibBuilder.loadTexts: vlanVoice.setContactInfo('Marvell.com')
if mibBuilder.loadTexts: vlanVoice.setDescription('The private MIB module definition for voice vlan support in Marvell devices.')
# --- Global voice-VLAN state scalars (.6 - .9) ------------------------------
vlanVoiceAdminState = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("disabled", 0), ("auto-enabled", 1), ("auto-triggered", 2), ("oui-enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceAdminState.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAdminState.setDescription(' administrative voice vlan status ')
vlanVoiceOperState = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("disabled", 0), ("auto-enabled", 1), ("auto-triggered", 2), ("oui-enabled", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceOperState.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOperState.setDescription(' operational voice vlan status ')
vlanVoiceAdminVid = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceAdminVid.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAdminVid.setDescription('1-4094 actual vlan (must exist in dot1qvlan static table)')
vlanVoiceOperVid = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceOperVid.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOperVid.setDescription(' operational Voice Vlan ID')
# --- UC device table (.10): UC endpoints directly connected to the device ---
vlanVoiceUcDeviceTable = MibTable((1, 3, 6, 1, 4, 1, 89, 48, 54, 10), )
if mibBuilder.loadTexts: vlanVoiceUcDeviceTable.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceTable.setDescription('A table containing static default and UC directly connected to device configuration.')
vlanVoiceUcDeviceEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1), ).setIndexNames((0, "RADLAN-vlanVoice-MIB", "vlanVoiceUcDeviceType"), (0, "RADLAN-vlanVoice-MIB", "vlanVoiceUcDeviceMacAddress"), (0, "RADLAN-vlanVoice-MIB", "vlanVoiceUcDeviceInterface"))
if mibBuilder.loadTexts: vlanVoiceUcDeviceEntry.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceEntry.setDescription('Static and dynamic per port information for a voice VLAN.')
vlanVoiceUcDeviceType = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("default", 0), ("static", 1), ("uc", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceType.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceType.setDescription('type of entry')
vlanVoiceUcDeviceMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceMacAddress.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceMacAddress.setDescription('Mac address of UC')
vlanVoiceUcDeviceInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceInterface.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceInterface.setDescription('Mac interface on which UC is connected')
vlanVoiceUcDeviceVid = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceVid.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceVid.setDescription('The Voice Vlan ID')
vlanVoiceUcDeviceVpt = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceVpt.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceVpt.setDescription('The entry VPT')
vlanVoiceUcDeviceDscp = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceDscp.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceDscp.setDescription('The entry DSCP value')
vlanVoiceUcDeviceIsBest = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 10, 1, 7), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceUcDeviceIsBest.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceUcDeviceIsBest.setDescription('Indicates whether this entry is used as best local configuration')
# --- Auto voice-VLAN subtree (.11) ------------------------------------------
vlanVoiceAuto = MibIdentifier((1, 3, 6, 1, 4, 1, 89, 48, 54, 11))
vlanVoiceAutoAdmin = MibIdentifier((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 1))
vlanVoiceAutoAdminVpt = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 1, 1), VlanPriority()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceAutoAdminVpt.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoAdminVpt.setDescription('user configured VPT for Voice Vlan operation')
vlanVoiceAutoAdminDscp = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceAutoAdminDscp.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoAdminDscp.setDescription('user configured DSCP for Voice Vlan operation')
vlanVoiceAutoOper = MibIdentifier((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 2))
vlanVoiceAutoOperVpt = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 2, 1), VlanPriority()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceAutoOperVpt.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoOperVpt.setDescription('operational VPT for Voice Vlan operation')
vlanVoiceAutoOperDscp = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceAutoOperDscp.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoOperDscp.setDescription('user configured DSCP for Voice Vlan operation')
vlanVoiceAutoOperSource = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 2, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceAutoOperSource.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoOperSource.setDescription("The Mac address of the switch by which we've selected the VVID")
vlanVoiceAutoOperPriority = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 6, 10))).clone(namedValues=NamedValues(("staticActive", 0), ("staticInActive", 1), ("ucActive", 2), ("ucInActive", 3), ("default", 6), ("disabled", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceAutoOperPriority.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoOperPriority.setDescription('The reason for which Voice Vlan ID was selected.')
vlanVoiceAutoRefresh = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 3), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceAutoRefresh.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoRefresh.setDescription('By setting the MIB to True, VSDP refresh will be executed.')
vlanVoiceAutoAgreedVlanLastChange = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 11, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(12, 12)).setFixedLength(12)).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceAutoAgreedVlanLastChange.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceAutoAgreedVlanLastChange.setDescription('date format is DDMMYYHHMMSS')
# --- OUI-based voice-VLAN subtree (.12) -------------------------------------
vlanVoiceOUIBased = MibIdentifier((1, 3, 6, 1, 4, 1, 89, 48, 54, 12))
vlanVoiceOUIBasedAdminPriority = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 1), VlanPriority().clone(6)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedAdminPriority.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedAdminPriority.setDescription('An administratively assigned Priority, which will be used for all traffic on the voice vlan, this gives the packets the requested priority (CoS) within the bridge.')
vlanVoiceOUIBasedAdminRemark = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedAdminRemark.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedAdminRemark.setDescription('Remark VPT on tagged frames egress the voice vlan according. to priority true.false')
vlanVoiceOUIBasedSetToDefault = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedSetToDefault.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedSetToDefault.setDescription("The vlanVoiceOUIBasedSetToDefault indicates that vlanVoiceOUIBasedTable should be set to it's default values if existed (OUI default prefixes). To do so the vlanVoiceOUIBasedTable should be previously deleted by usual entries destroying. This object behaviors as write-only than reading this object will always return 'false'.")
vlanVoiceOUIBasedTable = MibTable((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 4), )
if mibBuilder.loadTexts: vlanVoiceOUIBasedTable.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedTable.setDescription('A table containing static global configuration information for Voice VLANs OUI MAC Prefixes. All entries are permanent and will be restored after the device is reset.')
vlanVoiceOUIBasedEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 4, 1), ).setIndexNames((0, "RADLAN-vlanVoice-MIB", "vlanVoiceOUIBasedPrefix"))
if mibBuilder.loadTexts: vlanVoiceOUIBasedEntry.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedEntry.setDescription('Information for a voice VLANs OUI MAC Prefixes configured into the device by management.')
vlanVoiceOUIBasedPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 4, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(3, 3)).setFixedLength(3))
if mibBuilder.loadTexts: vlanVoiceOUIBasedPrefix.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPrefix.setDescription('The index value used to identify the OUI MAC Prefix component associated with this entry. The value of this object is used as an index to the vlanVoiceOUIBasedTable. Voice VLANs OUI Prefix is the first 3 most significant octets of the MAC address.')
vlanVoiceOUIBasedDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedDescription.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedDescription.setDescription('An optional text that describes the OUI.')
vlanVoiceOUIBasedEntryRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 4, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: vlanVoiceOUIBasedEntryRowStatus.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedEntryRowStatus.setDescription('This object indicates the status of this entry.')
vlanVoiceOUIBasedPortTable = MibTable((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5), )
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortTable.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortTable.setDescription('A table containing static and dynamic per port configuration information for Voice VLAN. All entries are permanent and will be restored after the device is reset.')
vlanVoiceOUIBasedPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortEntry.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortEntry.setDescription('Static and dynamic per port information for a voice VLAN.')
vlanVoiceOUIBasedPortEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5, 1, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortEnable.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortEnable.setDescription('Enable this port to be a candidate to be added into the Voice VLAN.')
vlanVoiceOUIBasedPortVlanIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5, 1, 2), VlanIndex().clone(4095)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortVlanIndex.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortVlanIndex.setDescription('The Voice VLAN-ID the port is a candidate to be in.')
vlanVoiceOUIBasedPortSecure = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5, 1, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortSecure.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortSecure.setDescription('Specify this port to be in Secure Mode when entering the Voice VLAN. In Secure mode only frames with MAC prefix matched to one of the OUI table prefixes are accepted, otherwise dropped.')
vlanVoiceOUIBasedPortCurrentMembership = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("notActive", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortCurrentMembership.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortCurrentMembership.setDescription("Port's current status of membership in Voice VLAN. Port's possible values of membership in Voice VLAN: 'Active(1)' - Port is currently added to a Voice VLAN . 'NotActive(2)' - Specifies either that port is a candidate to be in Voice VLAN or disabled.")
vlanVoiceOUIBasedPortQosMode = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("src", 1), ("all", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortQosMode.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedPortQosMode.setDescription("Port's current QOS mode in Voice VLAN. Possible values: 'src(1)' - Only traffic with OUI prefix in the source MAC received QOS of the Voice Vlan. 'all(2)' - All traffic through that port received QOS of the Voice Vlan.")
vlanVoiceOUIBasedAgingTimeout = MibScalar((1, 3, 6, 1, 4, 1, 89, 48, 54, 12, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 43200)).clone(1440)).setUnits('minutes').setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanVoiceOUIBasedAgingTimeout.setStatus('current')
if mibBuilder.loadTexts: vlanVoiceOUIBasedAgingTimeout.setDescription('The vlanVoiceAgingTimeout indicates the time (in units of minutes) from when the last OUI MAC was ageout from the FDB the port will be removed from the Voice VLAN. The default value for vlanVoiceAgingTimeout object is 1440 minutes (24 hours). The value of this object must be restored from non-volatile storage after a re-initialization of the management system.')
# Export every defined symbol so that other MIB modules can import them by
# name via mibBuilder.importSymbols("RADLAN-vlanVoice-MIB", ...).
mibBuilder.exportSymbols("RADLAN-vlanVoice-MIB", vlanVoiceUcDeviceType=vlanVoiceUcDeviceType, vlanVoiceUcDeviceEntry=vlanVoiceUcDeviceEntry, vlanVoiceAdminState=vlanVoiceAdminState, vlanVoiceUcDeviceVid=vlanVoiceUcDeviceVid, vlanVoice=vlanVoice, vlanVoiceAutoAgreedVlanLastChange=vlanVoiceAutoAgreedVlanLastChange, PYSNMP_MODULE_ID=vlanVoice, vlanVoiceUcDeviceIsBest=vlanVoiceUcDeviceIsBest, vlanVoiceAutoRefresh=vlanVoiceAutoRefresh, vlanVoiceOUIBasedPortCurrentMembership=vlanVoiceOUIBasedPortCurrentMembership, vlanVoiceOUIBasedAgingTimeout=vlanVoiceOUIBasedAgingTimeout, vlanVoiceAutoOperDscp=vlanVoiceAutoOperDscp, vlanVoiceOUIBasedPrefix=vlanVoiceOUIBasedPrefix, vlanVoiceOUIBasedAdminPriority=vlanVoiceOUIBasedAdminPriority, vlanVoiceAutoOperVpt=vlanVoiceAutoOperVpt, vlanVoiceUcDeviceInterface=vlanVoiceUcDeviceInterface, vlanVoiceUcDeviceTable=vlanVoiceUcDeviceTable, vlanVoiceUcDeviceDscp=vlanVoiceUcDeviceDscp, vlanVoiceOUIBasedPortVlanIndex=vlanVoiceOUIBasedPortVlanIndex, vlanVoiceAutoOper=vlanVoiceAutoOper, vlanVoiceAutoOperPriority=vlanVoiceAutoOperPriority, vlanVoiceOUIBasedAdminRemark=vlanVoiceOUIBasedAdminRemark, vlanVoiceOUIBasedEntry=vlanVoiceOUIBasedEntry, vlanVoiceOUIBasedEntryRowStatus=vlanVoiceOUIBasedEntryRowStatus, vlanVoiceOUIBasedDescription=vlanVoiceOUIBasedDescription, vlanVoiceOUIBasedPortTable=vlanVoiceOUIBasedPortTable, vlanVoiceOUIBasedPortSecure=vlanVoiceOUIBasedPortSecure, vlanVoiceAuto=vlanVoiceAuto, vlanVoiceOUIBasedTable=vlanVoiceOUIBasedTable, vlanVoiceAutoAdminDscp=vlanVoiceAutoAdminDscp, vlanVoiceOUIBased=vlanVoiceOUIBased, vlanVoiceAutoOperSource=vlanVoiceAutoOperSource, vlanVoiceOUIBasedPortEnable=vlanVoiceOUIBasedPortEnable, vlanVoiceAdminVid=vlanVoiceAdminVid, vlanVoiceOUIBasedSetToDefault=vlanVoiceOUIBasedSetToDefault, vlanVoiceOUIBasedPortEntry=vlanVoiceOUIBasedPortEntry, vlanVoiceOperState=vlanVoiceOperState, vlanVoiceOperVid=vlanVoiceOperVid, vlanVoiceAutoAdmin=vlanVoiceAutoAdmin, vlanVoiceUcDeviceVpt=vlanVoiceUcDeviceVpt, 
vlanVoiceOUIBasedPortQosMode=vlanVoiceOUIBasedPortQosMode, vlanVoiceAutoAdminVpt=vlanVoiceAutoAdminVpt, vlanVoiceUcDeviceMacAddress=vlanVoiceUcDeviceMacAddress)
|
#include <ngx_http_push_module.h>
#include "store.h"
#include <store/rbtree_util.h>
#include <store/ngx_rwlock.h>
#include <store/ngx_http_push_module_ipc.h>
// On failure (val == fail): log the error, finalize the request with a 500,
// and return NULL from the *calling* function.
// NOTE(review): the expansion is a bare `if { ... }` -- using this macro
// directly before an `else` is a dangling-else hazard; confirm call sites
// before wrapping in do { } while(0) (the embedded `return` makes plain
// wrapping safe, but behavior at existing call sites should be checked).
#define NGX_HTTP_PUSH_BROADCAST_CHECK(val, fail, r, errormessage) \
if (val == fail) { \
ngx_log_error(NGX_LOG_ERR, (r)->connection->log, 0, errormessage); \
ngx_http_finalize_request(r, NGX_HTTP_INTERNAL_SERVER_ERROR); \
return NULL; \
}
// Same as above, but releases the shared-memory mutex first; for use on
// paths where shpool is currently locked.
#define NGX_HTTP_PUSH_BROADCAST_CHECK_LOCKED(val, fail, r, errormessage, shpool) \
if (val == fail) { \
ngx_shmtx_unlock(&(shpool)->mutex); \
ngx_log_error(NGX_LOG_ERR, (r)->connection->log, 0, errormessage); \
ngx_http_finalize_request(r, NGX_HTTP_INTERNAL_SERVER_ERROR); \
return NULL; \
}
// Total shared-memory footprint of a buffer: the struct itself, plus the
// data for in-memory buffers, plus the ngx_file_t and its NUL-terminated
// file name for file-backed buffers.
#define NGX_HTTP_BUF_ALLOC_SIZE(buf) \
(sizeof(*buf) + \
(((buf)->temporary || (buf)->memory) ? ngx_buf_size(buf) : 0) + \
(((buf)->file!=NULL) ? (sizeof(*(buf)->file) + (buf)->file->name.len + 1) : 0))
// Debug-log format strings for tracing the message lifecycle.
#define ENQUEUED_DBG "msg %p enqueued. ref:%i, p:%p n:%p"
#define CREATED_DBG "msg %p created ref:%i, p:%p n:%p"
#define FREED_DBG "msg %p freed. ref:%i, p:%p n:%p"
#define RESERVED_DBG "msg %p reserved. ref:%i, p:%p n:%p"
#define RELEASED_DBG "msg %p released. ref:%i, p:%p n:%p"
// Uncomment to log every shared-pool alloc/free.
//#define DEBUG_SHM_ALLOC 1
// List head for channels queued for deletion during emergency GC sweeps.
static ngx_http_push_channel_queue_t channel_gc_sentinel;
// Shared-memory slab pool and zone backing channel/message storage.
static ngx_slab_pool_t *ngx_http_push_shpool = NULL;
static ngx_shm_zone_t *ngx_http_push_shm_zone = NULL;
// Forward declaration; defined later in this file.
static ngx_int_t ngx_http_push_store_send_worker_message(ngx_http_push_channel_t *channel, ngx_http_push_subscriber_t *subscriber_sentinel, ngx_pid_t pid, ngx_int_t worker_slot, ngx_http_push_msg_t *msg, ngx_int_t status_code);
// rbtree-walk callback: if `channel` is eligible for deletion, queue it on
// channel_gc_sentinel for the caller to destroy.  Returns NGX_ERROR only
// when the bookkeeping node cannot be allocated.
static ngx_int_t ngx_http_push_channel_collector(ngx_http_push_channel_t * channel) {
  ngx_http_push_channel_queue_t *entry;

  if (ngx_http_push_clean_channel_locked(channel) == NULL) {
    // Channel still in use; nothing to collect.
    return NGX_OK;
  }
  // Heap allocation during garbage collection is intentional: the queue node
  // only needs to live until the sweep loop frees it.
  entry = ngx_alloc(sizeof(*entry), ngx_cycle->log);
  if (entry == NULL) {
    return NGX_ERROR;
  }
  entry->channel = channel;
  ngx_queue_insert_tail(&channel_gc_sentinel.queue, &entry->queue);
  return NGX_OK;
}
// Acquire the shared-memory pool mutex.
static void ngx_http_push_store_lock_shmem(void){
ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
}
// Release the shared-memory pool mutex.
static void ngx_http_push_store_unlock_shmem(void){
ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
}
// Garbage-collecting slab allocator.  Caller must hold the shpool mutex.
// On exhaustion it deletes unused channels (emergency sweep) and retries the
// allocation once; returns NULL only if still out of memory afterwards.
static void * ngx_http_push_slab_alloc_locked(size_t size, char *label) {
void *p;
if((p = ngx_slab_alloc_locked(ngx_http_push_shpool, size))==NULL) {
ngx_http_push_channel_queue_t *ccur, *cnext;
ngx_uint_t collected = 0;
//failed. emergency garbage sweep, then.
//collect channels
ngx_queue_init(&channel_gc_sentinel.queue);
ngx_http_push_walk_rbtree(ngx_http_push_channel_collector, ngx_http_push_shm_zone);
// Sweep the queue built by the collector, deleting each channel and the
// heap bookkeeping node.  cnext is saved before freeing ccur.
for(ccur=(ngx_http_push_channel_queue_t *)ngx_queue_next(&channel_gc_sentinel.queue); ccur != &channel_gc_sentinel; ccur=cnext) {
cnext = (ngx_http_push_channel_queue_t *)ngx_queue_next(&ccur->queue);
ngx_http_push_delete_channel_locked(ccur->channel, ngx_http_push_shm_zone);
ngx_free(ccur);
collected++;
}
//todo: collect worker messages maybe
ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "push module: out of shared memory. emergency garbage collection deleted %ui unused channels.", collected);
// Single retry after the sweep.
p = ngx_slab_alloc_locked(ngx_http_push_shpool, size);
}
#if (DEBUG_SHM_ALLOC == 1)
if (p != NULL) {
if(label==NULL)
label="none";
ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "shpool alloc addr %p size %ui label %s", p, size, label);
}
#endif
return p;
}
// Locking wrapper around ngx_http_push_slab_alloc_locked(): takes the
// shpool mutex for the duration of the allocation.
static void * ngx_http_push_slab_alloc(size_t size, char *label) {
  void *result;

  ngx_http_push_store_lock_shmem();
  result = ngx_http_push_slab_alloc_locked(size, label);
  ngx_http_push_store_unlock_shmem();

  return result;
}
// Return `ptr` to the shared slab pool.  Caller must hold the shpool mutex.
static void ngx_http_push_slab_free_locked(void *ptr) {
ngx_slab_free_locked(ngx_http_push_shpool, ptr);
#if (DEBUG_SHM_ALLOC == 1)
ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "shpool free addr %p", ptr);
#endif
}
/*
static void ngx_http_push_slab_free(void *ptr) {
ngx_http_push_store_lock_shmem();
ngx_http_push_slab_free_locked(ptr);
ngx_http_push_store_unlock_shmem();
}*/
// Newest message in the channel's queue, or NULL if the queue is empty.
// Caller must hold the shpool mutex.
static ngx_http_push_msg_t *ngx_http_push_get_latest_message_locked(ngx_http_push_channel_t * channel) {
  ngx_queue_t *q = &channel->message_queue->queue;

  return ngx_queue_empty(q)
      ? NULL
      : ngx_queue_data(ngx_queue_last(q), ngx_http_push_msg_t, queue);
}
// Oldest message in the channel's queue, or NULL if the queue is empty.
// Caller must hold the shpool mutex.  No memory is freed.  O(1).
static ngx_http_push_msg_t *ngx_http_push_get_oldest_message_locked(ngx_http_push_channel_t * channel) {
  ngx_queue_t *q = &channel->message_queue->queue;

  return ngx_queue_empty(q)
      ? NULL
      : ngx_queue_data(ngx_queue_head(q), ngx_http_push_msg_t, queue);
}
//increment a message's reservation count by one. shpool must be locked.
//the channel parameter is unused here; kept for signature symmetry with release.
static void ngx_http_push_store_reserve_message_locked(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg) {
  if(msg == NULL) {
    return;
  }
  msg->refcount++;
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, RESERVED_DBG, msg, msg->refcount, msg->queue.prev, msg->queue.next);
  //we need a refcount because channel messages MAY be dequed before they are used up. It thus falls on the IPC stuff to free it.
}
//increment a message's reservation count by `reservations` at once (one per
//interested worker). shpool must be locked. channel parameter is unused.
static void ngx_http_push_store_reserve_message_num_locked(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg, ngx_int_t reservations) {
  if(msg == NULL) {
    return;
  }
  msg->refcount+=reservations;
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, RESERVED_DBG, msg, msg->refcount, msg->queue.prev, msg->queue.next);
  //we need a refcount because channel messages MAY be dequed before they are used up. It thus falls on the IPC stuff to free it.
}
//locking wrapper around ngx_http_push_store_reserve_message_locked().
static void ngx_http_push_store_reserve_message(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg) {
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  ngx_http_push_store_reserve_message_locked(channel, msg);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  //we need a refcount because channel messages MAY be dequed before they are used up. It thus falls on the IPC stuff to free it.
}
//free memory for a message. shpool must be locked.
//does NOT dequeue the message; caller is responsible for that.
static ngx_inline void ngx_http_push_free_message_locked(ngx_http_push_msg_t *msg, ngx_slab_pool_t *shpool) {
  if(msg->buf->file!=NULL) {
    // i'd like to release the shpool lock here while i do stuff to this file, but that
    // might unlock during channel rbtree traversal, which is Bad News.
    if(msg->buf->file->fd!=NGX_INVALID_FILE) {
      ngx_close_file(msg->buf->file->fd);
    }
    ngx_delete_file(msg->buf->file->name.data); //should I care about deletion errors? doubt it.
  }
  //sanity-check the refcount *before* freeing msg. the original read
  //msg->refcount after ngx_http_push_slab_free_locked(msg) -- a use-after-free.
  if(msg->refcount < 0) { //something worth exploring went wrong
    raise(SIGSEGV);
  }
  ngx_http_push_slab_free_locked(msg->buf); //separate block, remember?
  ngx_http_push_slab_free_locked(msg);
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, FREED_DBG, msg, msg->refcount, msg->queue.prev, msg->queue.next);
  ((ngx_http_push_shm_data_t *) ngx_http_push_shm_zone->data)->messages--;
}
// remove a message from queue and free all associated memory. assumes shpool is already locked.
static ngx_int_t ngx_http_push_delete_message_locked(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg, ngx_int_t force) {
  if(msg == NULL) {
    return NGX_OK;
  }
  if(channel != NULL) {
    //detach the message from the channel's queue and update the count
    ngx_queue_remove(&msg->queue);
    channel->messages--;
  }
  if(force || msg->refcount <= 0) {
    //either nobody is holding a reservation, or we were told to delete regardless
    ngx_http_push_free_message_locked(msg, ngx_http_push_shpool);
  }
  return NGX_OK;
}
//drop one reservation on msg; free it if it was already dequeued and is now
//unreferenced, or delete it if the channel's min-messages policy allows.
//shpool must be locked.
static void ngx_http_push_store_release_message_locked(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg) {
  if(msg == NULL) {
    return;
  }
  msg->refcount--;
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, RELEASED_DBG, msg, msg->refcount, msg->queue.prev, msg->queue.next);
  if(msg->queue.next==NULL && msg->refcount<=0) {
    //message had been dequeued and nobody needs it anymore
    ngx_http_push_free_message_locked(msg, ngx_http_push_shpool);
    //msg is gone. returning here fixes a use-after-free: the original fell
    //through and read msg->delete_oldest_received_min_messages (and compared
    //the freed pointer) in the condition below.
    return;
  }
  if(channel != NULL && channel->messages > msg->delete_oldest_received_min_messages && ngx_http_push_get_oldest_message_locked(channel) == msg) {
    ngx_http_push_delete_message_locked(channel, msg, 0);
  }
}
//locking wrapper around ngx_http_push_store_release_message_locked().
static void ngx_http_push_store_release_message(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg) {
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  ngx_http_push_store_release_message_locked(channel, msg);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
}
//locking wrapper around ngx_http_push_delete_message_locked().
static ngx_int_t ngx_http_push_delete_message(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg, ngx_int_t force) {
  ngx_int_t                       rc;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  rc = ngx_http_push_delete_message_locked(channel, msg, force);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  return rc;
}
/** find the message that comes right after entity tags (time, tag) in *msgid.
 * messages are ordered oldest-to-newest; ties on message_time are broken by message_tag.
 * shpool is assumed to be locked.
 * @param channel channel whose message queue is searched
 * @param msgid entity tags (time + tag) of the last message the subscriber received
 * @param status out-param: NGX_HTTP_PUSH_MESSAGE_FOUND, _EXPECTED (wait for a future message), or _EXPIRED
 */
static ngx_http_push_msg_t * ngx_http_push_find_message_locked(ngx_http_push_channel_t *channel, ngx_http_push_msg_id_t *msgid, ngx_int_t *status) {
  //TODO: consider using an RBTree for message storage.
  ngx_queue_t *sentinel = &channel->message_queue->queue;
  ngx_queue_t *cur = ngx_queue_head(sentinel);
  ngx_http_push_msg_t *msg;
  time_t time = msgid->time;
  ngx_int_t tag = msgid->tag;
  //channel's message buffer empty?
  if(channel->messages==0) {
    *status=NGX_HTTP_PUSH_MESSAGE_EXPECTED; //wait.
    return NULL;
  }
  // do we want a future message?
  msg = ngx_queue_data(sentinel->prev, ngx_http_push_msg_t, queue); //newest message in the queue
  if(time <= msg->message_time) { //that's an empty check (Sentinel's values are zero)
    if(time == msg->message_time) {
      if(tag >= msg->message_tag) {
        //subscriber already saw the newest message; wait for the next one
        *status=NGX_HTTP_PUSH_MESSAGE_EXPECTED;
        return NULL;
      }
    }
  }
  else {
    //requested time is newer than anything in the queue
    *status=NGX_HTTP_PUSH_MESSAGE_EXPECTED;
    return NULL;
  }
  //walk oldest-to-newest looking for the first message strictly after (time, tag)
  while(cur!=sentinel) {
    msg = ngx_queue_data(cur, ngx_http_push_msg_t, queue);
    if (time < msg->message_time) {
      *status = NGX_HTTP_PUSH_MESSAGE_FOUND;
      return msg;
    }
    else if(time == msg->message_time) {
      //same second: skip past messages with tag <= the requested tag.
      //(the future-message check above guarantees this terminates: if we ever
      //hit the queue tail here, its tag must exceed the requested tag.)
      while (tag >= msg->message_tag && time == msg->message_time && ngx_queue_next(cur)!=sentinel) {
        cur=ngx_queue_next(cur);
        msg = ngx_queue_data(cur, ngx_http_push_msg_t, queue);
      }
      if(time == msg->message_time && tag < msg->message_tag) {
        *status = NGX_HTTP_PUSH_MESSAGE_FOUND;
        return msg;
      }
      continue;
    }
    cur=ngx_queue_next(cur);
  }
  *status = NGX_HTTP_PUSH_MESSAGE_EXPIRED; //message too old and was not found.
  return NULL;
}
//look up a channel by id under the shmem lock (NULL when not found), then
//optionally hand the result -- found or not -- to the callback.
static ngx_http_push_channel_t * ngx_http_push_store_find_channel(ngx_str_t *id, time_t channel_timeout, ngx_int_t (*callback)(ngx_http_push_channel_t *channel)) {
  ngx_http_push_channel_t        *found;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  found = ngx_http_push_find_channel(id, channel_timeout, ngx_http_push_shm_zone);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  if(callback != NULL) {
    callback(found);
  }
  return found;
}
//temporary cheat
/* deliver msg (or just a status response, when msg==NULL) to every subscriber
 * on the channel. subscribers belonging to this worker are responded to
 * directly; other workers get a worker message plus an alert signal.
 * returns NGX_HTTP_PUSH_MESSAGE_RECEIVED if the channel had subscribers at the
 * time of the call, NGX_HTTP_PUSH_MESSAGE_QUEUED otherwise. */
static ngx_int_t ngx_http_push_store_publish_raw(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg, ngx_int_t status_code, const ngx_str_t *status_line) {
  //subscribers are queued up in a local pool. Queue heads, however, are located
  //in shared memory, identified by pid.
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  ngx_http_push_pid_queue_t     *sentinel = channel->workers_with_subscribers;
  //snapshot each worker's subscriber sentinel under the lock so we can fan out
  //without holding the mutex across the whole delivery loop.
  ngx_http_push_subscriber_t    *subscriber_sentinels[NGX_MAX_PROCESSES];
  ngx_http_push_pid_queue_t     *pid_queues[NGX_MAX_PROCESSES];
  ngx_int_t                      sub_sentinel_count=0;
  ngx_http_push_pid_queue_t     *cur;
  ngx_int_t                      i, received;
  received = channel->subscribers > 0 ? NGX_HTTP_PUSH_MESSAGE_RECEIVED : NGX_HTTP_PUSH_MESSAGE_QUEUED;
  //we need to reserve the message for all the workers in advance
  for(cur=(ngx_http_push_pid_queue_t *)ngx_queue_next(&sentinel->queue); cur != sentinel; cur=(ngx_http_push_pid_queue_t *)ngx_queue_next(&cur->queue)) {
    if(cur->subscriber_sentinel != NULL) {
      pid_queues[sub_sentinel_count] = cur;
      subscriber_sentinels[sub_sentinel_count] = cur->subscriber_sentinel;
      /*
      each time all of a worker's subscribers are removed, so is the sentinel.
      this is done to make garbage collection easier. Assuming we want to avoid
      placing the sentinel in shared memory (for now -- it's a little tricky
      to debug), the owner of the worker pool must be the one to free said sentinel.
      But channels may be deleted by different worker processes, and it seems unwieldy
      (for now) to do IPC just to delete one stinkin' sentinel. Hence a new sentinel
      is used every time the subscriber queue is emptied.
      */
      cur->subscriber_sentinel = NULL; //think about it it terms of garbage collection. it'll make sense. sort of.
      sub_sentinel_count++;
    }
  }
  if(sub_sentinel_count > 0) {
    //one reservation per worker; each worker releases its own when done
    ngx_http_push_store_reserve_message_num_locked(channel, msg, sub_sentinel_count);
  }
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  ngx_http_push_subscriber_t *subscriber_sentinel=NULL;
  for(i=0; i < sub_sentinel_count; i++) {
    //re-take the lock to read the pid/slot of each snapshotted pid-queue entry
    ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
    subscriber_sentinel = subscriber_sentinels[i];
    pid_t           worker_pid  = pid_queues[i]->pid;
    ngx_int_t       worker_slot = pid_queues[i]->slot;
    ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
    //if(msg != NULL)
    //  ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "publish msg %p (ref: %i) for worker %i (slot %i)", msg, msg->refcount, worker_pid, worker_slot);
    if(subscriber_sentinel != NULL) {
      if(worker_pid == ngx_pid) {
        //my subscribers
        ngx_http_push_respond_to_subscribers(channel, subscriber_sentinel, msg, status_code, status_line);
      }
      else {
        //some other worker's subscribers
        //interprocess communication breakdown
        if(ngx_http_push_store_send_worker_message(channel, subscriber_sentinel, worker_pid, worker_slot, msg, status_code) != NGX_ERROR) {
          ngx_http_push_alert_worker(worker_pid, worker_slot);
        }
        else {
          ngx_log_error(NGX_LOG_ERR, ngx_cycle->log, 0, "push module: error communicating with some other worker process");
        }
      }
    } else {
      //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "subscriber sentinel is NULL");
    }
  }
  return received;
}
//delete a channel: force-free all of its messages, notify subscribers with a
//410 Gone broadcast, then remove the channel itself from the rbtree.
//returns NGX_OK (also when the channel does not exist).
static ngx_int_t ngx_http_push_store_delete_channel(ngx_str_t *channel_id) {
  ngx_http_push_channel_t        *channel;
  ngx_http_push_msg_t            *msg, *next, *sentinel;
  ngx_http_push_store_lock_shmem();
  channel = ngx_http_push_find_channel(channel_id, NGX_HTTP_PUSH_DEFAULT_CHANNEL_TIMEOUT, ngx_http_push_shm_zone);
  if (channel == NULL) {
    ngx_http_push_store_unlock_shmem();
    return NGX_OK;
  }
  sentinel = channel->message_queue;
  //force-delete all the messages. grab the next pointer *before* deleting:
  //the original advanced via ngx_queue_next(&msg->queue) after msg had been
  //force-freed, which is a use-after-free.
  msg = (ngx_http_push_msg_t *)ngx_queue_next(&sentinel->queue);
  while(msg != sentinel) {
    next = (ngx_http_push_msg_t *)ngx_queue_next(&msg->queue);
    ngx_http_push_delete_message_locked(NULL, msg, 1);
    msg = next;
  }
  channel->messages=0;
  //410 gone
  ngx_http_push_store_unlock_shmem();
  ngx_http_push_store_publish_raw(channel, NULL, NGX_HTTP_GONE, &NGX_HTTP_PUSH_HTTP_STATUS_410);
  ngx_http_push_store_lock_shmem();
  ngx_http_push_delete_channel_locked(channel, ngx_http_push_shm_zone);
  ngx_http_push_store_unlock_shmem();
  return NGX_OK;
}
//find-or-create a channel by id under the shmem lock, then optionally hand the
//result to the callback. NULL only when shared memory is exhausted.
static ngx_http_push_channel_t * ngx_http_push_store_get_channel(ngx_str_t *id, time_t channel_timeout, ngx_int_t (*callback)(ngx_http_push_channel_t *channel)) {
  ngx_http_push_channel_t        *channel;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  channel = ngx_http_push_get_channel(id, channel_timeout, ngx_http_push_shm_zone);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  if(channel == NULL) {
    ngx_log_error(NGX_LOG_ERR, ngx_cycle->log, 0, "push module: unable to allocate memory for new channel");
  }
  if(callback != NULL) {
    callback(channel);
  }
  return channel;
}
//look up the message following *msgid in the channel; reserve it when found.
//also refreshes the channel's last_seen/expires bookkeeping. the caller must
//eventually release a FOUND message via ngx_http_push_store->release_message.
static ngx_http_push_msg_t * ngx_http_push_store_get_channel_message(ngx_http_push_channel_t *channel, ngx_http_push_msg_id_t *msgid, ngx_int_t *msg_search_outcome, ngx_http_push_loc_conf_t *cf) {
  ngx_http_push_msg_t *msg;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  msg = ngx_http_push_find_message_locked(channel, msgid, msg_search_outcome);
  if(*msg_search_outcome == NGX_HTTP_PUSH_MESSAGE_FOUND) {
    //keep the message alive until the caller is done with it
    ngx_http_push_store_reserve_message_locked(channel, msg);
  }
  channel->last_seen = ngx_time();
  channel->expires = ngx_time() + cf->channel_timeout;
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  return msg;
}
//no-op callback used when ngx_http_push_store_get_message is called without one.
static ngx_int_t default_get_message_callback(ngx_http_push_msg_t *msg, ngx_int_t msg_search_outcome, ngx_http_request_t *r) {
  return NGX_OK;
}
//find-or-create the channel, then fetch (and reserve) the message after
//*msg_id from it. the callback is invoked with the outcome unless the channel
//lookup itself fails, in which case NULL is returned immediately.
static ngx_http_push_msg_t * ngx_http_push_store_get_message(ngx_str_t *channel_id, ngx_http_push_msg_id_t *msg_id, ngx_int_t *msg_search_outcome, ngx_http_request_t *r, ngx_int_t (*callback)(ngx_http_push_msg_t *msg, ngx_int_t msg_search_outcome, ngx_http_request_t *r)) {
  ngx_http_push_channel_t        *channel;
  ngx_http_push_msg_t            *found;
  if(callback == NULL) {
    callback = &default_get_message_callback;
  }
  ngx_http_push_store_lock_shmem();
  channel = ngx_http_push_get_channel(channel_id, NGX_HTTP_PUSH_DEFAULT_CHANNEL_TIMEOUT, ngx_http_push_shm_zone);
  ngx_http_push_store_unlock_shmem();
  if(channel == NULL) {
    return NULL;
  }
  found = ngx_http_push_store_get_channel_message(channel, msg_id, msg_search_outcome, ngx_http_get_module_loc_conf(r, ngx_http_push_module));
  callback(found, *msg_search_outcome, r);
  return found;
}
// shared memory zone initializer
// called by nginx once the shm zone is mapped. On reload, `data` carries the
// previous cycle's zone data and is simply reattached. Otherwise, allocates
// the module's shm bookkeeping struct and the channel rbtree sentinel.
static ngx_int_t ngx_http_push_init_shm_zone(ngx_shm_zone_t * shm_zone, void *data) {
  if(data) { /* zone already initialized */
    shm_zone->data = data;
    return NGX_OK;
  }
  ngx_slab_pool_t                *shpool = (ngx_slab_pool_t *) shm_zone->shm.addr;
  ngx_rbtree_node_t              *sentinel;
  ngx_http_push_shm_data_t       *d;
  ngx_http_push_shpool = shpool; //we'll be using this a bit.
  #if (DEBUG_SHM_ALLOC == 1)
  ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "ngx_http_push_shpool start %p size %i", shpool->start, (u_char *)shpool->end - (u_char *)shpool->start);
  #endif
  if ((d = (ngx_http_push_shm_data_t *)ngx_http_push_slab_alloc(sizeof(*d), "shm data")) == NULL) { //shm_data
    return NGX_ERROR;
  }
  d->channels=0;
  d->messages=0;
  shm_zone->data = d;
  d->ipc=NULL; //worker message sentinel array; lazily allocated in ngx_http_push_store_init_ipc_shm
  //initialize rbtree
  if ((sentinel = ngx_http_push_slab_alloc(sizeof(*sentinel), "channel rbtree sentinel"))==NULL) {
    return NGX_ERROR;
  }
  ngx_rbtree_init(&d->tree, sentinel, ngx_http_push_rbtree_insert);
  return NGX_OK;
}
//shared memory
//name identifying this module's shm zone in ngx_shared_memory_add()
static ngx_str_t  ngx_push_shm_name = ngx_string("push_module"); //shared memory segment name
//register the module's shared memory zone with nginx and hook up its
//initializer. returns NGX_ERROR if registration fails.
static ngx_int_t ngx_http_push_set_up_shm(ngx_conf_t *cf, size_t shm_size) {
  ngx_http_push_shm_zone = ngx_shared_memory_add(cf, &ngx_push_shm_name, shm_size, &ngx_http_push_module);
  if (ngx_http_push_shm_zone == NULL) {
    return NGX_ERROR;
  }
  ngx_http_push_shm_zone->init = ngx_http_push_init_shm_zone;
  //any non-NULL data marks the zone as not yet carried over from a previous cycle
  ngx_http_push_shm_zone->data = (void *) 1;
  return NGX_OK;
}
//initialization
//module init (master process): record the configured worker count and set up IPC.
static ngx_int_t ngx_http_push_store_init_module(ngx_cycle_t *cycle) {
  ngx_core_conf_t                *ccf = (ngx_core_conf_t *) ngx_get_conf(cycle->conf_ctx, ngx_core_module);
  ngx_http_push_worker_processes = ccf->worker_processes;
  //initialize our little IPC
  return ngx_http_push_init_ipc(cycle, ngx_http_push_worker_processes);
}
//will be called once per worker
//lazily allocates the shared worker-message sentinel array (first worker to get
//here wins), then initializes this worker's own slot in it.
//note: the `workers` parameter is currently unused; the array is sized by
//NGX_MAX_PROCESSES instead.
static ngx_int_t ngx_http_push_store_init_ipc_shm(ngx_int_t workers) {
  ngx_slab_pool_t                *shpool = (ngx_slab_pool_t *) ngx_http_push_shm_zone->shm.addr;
  ngx_http_push_shm_data_t       *d = (ngx_http_push_shm_data_t *) ngx_http_push_shm_zone->data;
  ngx_http_push_worker_msg_sentinel_t *worker_messages=NULL;
  ngx_shmtx_lock(&shpool->mutex);
  if(d->ipc==NULL) {
    //ipc uninitialized. get it done!
    if((worker_messages = ngx_http_push_slab_alloc_locked(sizeof(*worker_messages)*NGX_MAX_PROCESSES, "IPC worker message sentinel array"))==NULL) {
      ngx_shmtx_unlock(&shpool->mutex);
      return NGX_ERROR;
    }
    d->ipc=worker_messages;
  }
  else {
    worker_messages=d->ipc;
  }
  //set up this worker's message queue and its lock
  ngx_queue_init(&worker_messages[ngx_process_slot].queue);
  ngx_rwlock_init(&worker_messages[ngx_process_slot].lock);
  ngx_shmtx_unlock(&shpool->mutex);
  return NGX_OK;
}
//per-worker init: claim this worker's IPC slot in shared memory, then bring up
//the worker's side of the IPC channel.
static ngx_int_t ngx_http_push_store_init_worker(ngx_cycle_t *cycle) {
  ngx_core_conf_t                *ccf = (ngx_core_conf_t *) ngx_get_conf(cycle->conf_ctx, ngx_core_module);
  if(ngx_http_push_store_init_ipc_shm(ccf->worker_processes) != NGX_OK) {
    return NGX_ERROR;
  }
  return ngx_http_push_ipc_init_worker(cycle);
}
//post-configuration: resolve the shared memory size (default it, page-align it,
//clamp it to a minimum of 8 pages) and register the shm zone.
static ngx_int_t ngx_http_push_store_init_postconfig(ngx_conf_t *cf) {
  ngx_http_push_main_conf_t      *conf = ngx_http_conf_get_module_main_conf(cf, ngx_http_push_module);
  size_t                          shm_size;
  size_t                          minimum = 8 * ngx_pagesize;
  //initialize shared memory
  if(conf->shm_size == NGX_CONF_UNSET_SIZE) {
    conf->shm_size = NGX_HTTP_PUSH_DEFAULT_SHM_SIZE;
  }
  shm_size = ngx_align(conf->shm_size, ngx_pagesize);
  if (shm_size < minimum) {
    ngx_conf_log_error(NGX_LOG_WARN, cf, 0, "The push_max_reserved_memory value must be at least %udKiB", minimum >> 10);
    shm_size = minimum;
  }
  if(ngx_http_push_shm_zone && ngx_http_push_shm_zone->shm.size != shm_size) {
    ngx_conf_log_error(NGX_LOG_WARN, cf, 0, "Cannot change memory area size without restart, ignoring change");
  }
  ngx_conf_log_error(NGX_LOG_INFO, cf, 0, "Using %udKiB of shared memory for push module", shm_size >> 10);
  return ngx_http_push_set_up_shm(cf, shm_size);
}
//main-conf defaults: shm_size unset until init_postconfig resolves it.
static void ngx_http_push_store_create_main_conf(ngx_conf_t *cf, ngx_http_push_main_conf_t *mcf) {
  mcf->shm_size=NGX_CONF_UNSET_SIZE;
}
//great justice appears to be at hand
//force-delete every message in the channel. shpool assumed locked.
//used as an rbtree-walk callback when tearing down all channels at exit.
static ngx_int_t ngx_http_push_movezig_channel_locked(ngx_http_push_channel_t * channel) {
  ngx_queue_t                    *sentinel = &channel->message_queue->queue;
  ngx_queue_t                    *head;
  while(!ngx_queue_empty(sentinel)) {
    head = ngx_queue_head(sentinel);
    ngx_http_push_delete_message_locked(channel, ngx_queue_data(head, ngx_http_push_msg_t, queue), 1);
  }
  return NGX_OK;
}
//read the channel's total subscriber count (across all workers) under the shmem lock.
static ngx_int_t ngx_http_push_store_channel_subscribers(ngx_http_push_channel_t * channel) {
  ngx_int_t subs;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  subs = channel->subscribers;
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  return subs;
}
//count the subscribers in this worker's (local, in-pool) subscriber queue.
static ngx_int_t ngx_http_push_store_channel_worker_subscribers(ngx_http_push_subscriber_t * worker_sentinel) {
  ngx_http_push_subscriber_t     *cur;
  ngx_int_t                       count = 0;
  for(cur = (ngx_http_push_subscriber_t *)ngx_queue_head(&worker_sentinel->queue);
      cur != worker_sentinel;
      cur = (ngx_http_push_subscriber_t *)ngx_queue_next(&cur->queue)) {
    count++;
  }
  return count;
}
//advance a cursor through the subscriber queue. pass cur==NULL to start at the
//head; with release_previous==1, the element just visited is freed back to the
//worker pool. returns NULL when the sentinel (end of queue) is reached.
static ngx_http_push_subscriber_t *ngx_http_push_store_channel_next_subscriber(ngx_http_push_channel_t *channel, ngx_http_push_subscriber_t *sentinel, ngx_http_push_subscriber_t *cur, int release_previous) {
  ngx_http_push_subscriber_t     *next;
  if(cur == NULL) {
    next = (ngx_http_push_subscriber_t *)ngx_queue_head(&sentinel->queue);
  }
  else {
    next = (ngx_http_push_subscriber_t *)ngx_queue_next(&cur->queue);
    if(release_previous == 1 && cur != sentinel) {
      //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "freeing subscriber cursor at %p.", cur);
      ngx_pfree(ngx_http_push_pool, cur);
    }
  }
  if(next == sentinel) {
    return NULL;
  }
  return next;
}
//free a worker's subscriber-queue sentinel back to the worker pool.
//the channel parameter is unused here.
static ngx_int_t ngx_http_push_store_channel_release_subscriber_sentinel(ngx_http_push_channel_t *channel, ngx_http_push_subscriber_t *sentinel) {
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "freeing subscriber sentinel at %p.", sentinel);
  ngx_pfree(ngx_http_push_pool, sentinel);
  return NGX_OK;
}
//per-worker teardown: just tears down this worker's side of the IPC channel.
static void ngx_http_push_store_exit_worker(ngx_cycle_t *cycle) {
  ngx_http_push_ipc_exit_worker(cycle);
}
//master-process teardown: force-free every channel's messages, then shut down IPC.
static void ngx_http_push_store_exit_master(ngx_cycle_t *cycle) {
  //destroy channel tree in shared memory
  ngx_http_push_walk_rbtree(ngx_http_push_movezig_channel_locked, ngx_http_push_shm_zone);
  //deinitialize IPC
  ngx_http_push_shutdown_ipc(cycle);
}
/* attach request r as a subscriber to the channel.
 * ensures this worker has a pid-queue entry (in shared memory) and a local
 * subscriber sentinel, allocates the subscriber (in the worker pool), and
 * enqueues it. returns the subscriber, or NULL on allocation failure.
 * bugfixes vs original: the sentinel-allocation failure path now releases the
 * shpool mutex (it previously returned while holding it -- a deadlock), and
 * channel->subscribers is incremented only after every allocation succeeded. */
static ngx_http_push_subscriber_t * ngx_http_push_store_subscribe_raw(ngx_http_push_channel_t *channel, ngx_http_request_t *r) {
  ngx_http_push_pid_queue_t      *sentinel, *cur, *found;
  ngx_http_push_subscriber_t     *subscriber;
  ngx_http_push_subscriber_t     *subscriber_sentinel;
  //subscribers are queued up in a local pool. Queue sentinels are separate and also local, but not in the pool.
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  sentinel = channel->workers_with_subscribers;
  found = NULL;
  for(cur = (ngx_http_push_pid_queue_t *)ngx_queue_head(&sentinel->queue); cur != sentinel; cur = (ngx_http_push_pid_queue_t *)ngx_queue_next(&cur->queue)) {
    if(cur->pid == ngx_pid) {
      found = cur;
      break;
    }
  }
  if(found == NULL) {
    //this worker has no pid-queue entry on the channel yet; create one in shared memory
    if((found=ngx_http_push_slab_alloc_locked(sizeof(*found), "worker subscriber sentinel"))==NULL) {
      ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
      ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: unable to allocate worker subscriber queue marker in shared memory");
      return NULL;
    }
    //initialize
    ngx_queue_insert_tail(&sentinel->queue, &found->queue);
    found->pid=ngx_pid;
    found->slot=ngx_process_slot;
    found->subscriber_sentinel=NULL;
  }
  if((subscriber = ngx_palloc(ngx_http_push_pool, sizeof(*subscriber)))==NULL) { //unable to allocate request queue element
    ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
    ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: unable to allocate subscriber worker's memory pool");
    return NULL;
  }
  //figure out the subscriber sentinel
  subscriber_sentinel = found->subscriber_sentinel;
  if(subscriber_sentinel==NULL) {
    //it's perfectly normal for the sentinel to be NULL: it is discarded each
    //time a worker's subscriber queue is emptied.
    if((subscriber_sentinel=ngx_palloc(ngx_http_push_pool, sizeof(*subscriber_sentinel)))==NULL) {
      ngx_shmtx_unlock(&ngx_http_push_shpool->mutex); //bugfix: original leaked the lock here
      ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: unable to allocate channel subscriber sentinel");
      return NULL;
    }
    ngx_queue_init(&subscriber_sentinel->queue);
    found->subscriber_sentinel=subscriber_sentinel;
  }
  channel->subscribers++; // do this only when we know everything went okay.
  ngx_queue_insert_tail(&subscriber_sentinel->queue, &subscriber->queue);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  subscriber->request = r;
  return subscriber;
}
//apply the location's subscriber-concurrency policy to a new subscriber.
//returns NGX_OK to proceed, NGX_DECLINED if a response was already sent
//(caller should stop), NGX_ERROR on an unknown policy value.
static ngx_int_t ngx_http_push_handle_subscriber_concurrency(ngx_http_push_channel_t *channel, ngx_http_request_t *r, ngx_http_push_loc_conf_t *cf) {
  ngx_int_t                      max_subscribers = cf->max_channel_subscribers;
  ngx_int_t                      current_subscribers = ngx_http_push_store->channel_subscribers(channel) ;
  if(current_subscribers==0) {
    //empty channels are always okay.
    return NGX_OK;
  }
  if(max_subscribers!=0 && current_subscribers >= max_subscribers) {
    //max_channel_subscribers setting
    ngx_http_push_respond_status_only(r, NGX_HTTP_FORBIDDEN, NULL);
    return NGX_DECLINED;
  }
  //nonzero number of subscribers present
  switch(cf->subscriber_concurrency) {
    case NGX_HTTP_PUSH_SUBSCRIBER_CONCURRENCY_BROADCAST:
      return NGX_OK;
    case NGX_HTTP_PUSH_SUBSCRIBER_CONCURRENCY_LASTIN:
      //send "everyone" a 409 Conflict response.
      //in most reasonable cases, there'll be at most one subscriber on the
      //channel. However, since settings are bound to locations and not
      //specific channels, this assumption need not hold. Hence this broadcast.
      //NOTE(review): passes NGX_HTTP_NOT_FOUND as the code alongside the 409
      //status line -- presumably the explicit status line wins; confirm.
      ngx_http_push_store_publish_raw(channel, NULL, NGX_HTTP_NOT_FOUND, &NGX_HTTP_PUSH_HTTP_STATUS_409);
      return NGX_OK;
    case NGX_HTTP_PUSH_SUBSCRIBER_CONCURRENCY_FIRSTIN:
      //reject the newcomer instead; same code/status-line mismatch as above.
      ngx_http_push_respond_status_only(r, NGX_HTTP_NOT_FOUND, &NGX_HTTP_PUSH_HTTP_STATUS_409);
      return NGX_DECLINED;
    default:
      return NGX_ERROR;
  }
}
//identity callback used when ngx_http_push_store_subscribe is called without one.
static ngx_int_t default_subscribe_callback(ngx_int_t status, ngx_http_request_t *r) {
  return status;
}
/* top-level subscribe entry point: resolve the channel (find-only when
 * push_authorized_channels_only is on), apply the concurrency policy, then
 * either respond with an existing message or enqueue the request as a
 * long-poll subscriber. the result status is always funneled through callback
 * (defaults to the identity function). */
static ngx_int_t ngx_http_push_store_subscribe(ngx_str_t *channel_id, ngx_http_push_msg_id_t *msg_id, ngx_http_request_t *r, ngx_int_t (*callback)(ngx_int_t status, ngx_http_request_t *r)) {
  ngx_http_push_channel_t        *channel;
  ngx_http_push_msg_t            *msg;
  ngx_int_t                       msg_search_outcome;
  ngx_http_push_loc_conf_t       *cf = ngx_http_get_module_loc_conf(r, ngx_http_push_module);
  if(callback == NULL) {
    callback=&default_subscribe_callback;
  }
  if (cf->authorize_channel==1) {
    //authorized mode: never create the channel on subscribe
    channel = ngx_http_push_store_find_channel(channel_id, cf->channel_timeout, NULL);
  }else{
    channel = ngx_http_push_store_get_channel(channel_id, cf->channel_timeout, NULL);
  }
  if (channel==NULL) {
    //unable to allocate channel OR channel not found
    if(cf->authorize_channel) {
      return callback(NGX_HTTP_FORBIDDEN, r);
    }
    else {
      ngx_log_error(NGX_LOG_ERR, ngx_cycle->log, 0, "push module: unable to allocate shared memory for channel");
      return callback(NGX_HTTP_INTERNAL_SERVER_ERROR, r);
    }
  }
  switch(ngx_http_push_handle_subscriber_concurrency(channel, r, cf)) {
    case NGX_DECLINED: //this request was declined for some reason.
      //status codes and whatnot should have already been written. just get out of here quickly.
      return callback(NGX_OK, r);
    case NGX_ERROR:
      ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: error handling subscriber concurrency setting");
      return callback(NGX_ERROR, r);
  }
  msg = ngx_http_push_store->get_channel_message(channel, msg_id, &msg_search_outcome, cf);
  if (cf->ignore_queue_on_no_cache && !ngx_http_push_allow_caching(r)) {
    //no-cache subscriber: pretend the queue is empty and wait for a fresh message.
    //NOTE(review): a FOUND msg was reserved by get_channel_message and appears
    //to not be released on this path -- verify against release accounting.
    msg_search_outcome = NGX_HTTP_PUSH_MESSAGE_EXPECTED;
    msg = NULL;
  }
  switch(msg_search_outcome) {
    //for message-found:
    ngx_str_t                  *etag;
    ngx_str_t                  *content_type;
    ngx_chain_t                *chain;
    time_t                      last_modified;
    ngx_http_push_subscriber_t *subscriber;
    case NGX_HTTP_PUSH_MESSAGE_EXPECTED:
      // ♫ It's gonna be the future soon ♫
      if ((subscriber = ngx_http_push_store_subscribe_raw(channel, r))==NULL) {
        return callback(NGX_HTTP_INTERNAL_SERVER_ERROR, r);
      }
      if(ngx_push_longpoll_subscriber_enqueue(channel, subscriber, cf->subscriber_timeout) == NGX_OK) {
        return callback(NGX_DONE, r);
      }
      else {
        return callback(NGX_ERROR, r);
      }
    case NGX_HTTP_PUSH_MESSAGE_EXPIRED:
      //subscriber wants an expired message
      //TODO: maybe respond with entity-identifiers for oldest available message?
      return callback(NGX_HTTP_NO_CONTENT, r);
    case NGX_HTTP_PUSH_MESSAGE_FOUND:
      //copy the message out into the request pool, respond, then release the reservation
      ngx_http_push_alloc_for_subscriber_response(r->pool, 0, msg, &chain, &content_type, &etag, &last_modified);
      ngx_int_t ret=ngx_http_push_prepare_response_to_subscriber_request(r, chain, content_type, etag, last_modified);
      ngx_http_push_store->release_message(channel, msg);
      return callback(ret, r);
    default: //we shouldn't be here.
      return callback(NGX_HTTP_INTERNAL_SERVER_ERROR, r);
  }
}
//copy the message's tag out as a freshly allocated decimal string. allocated
//from `pool` when given, otherwise from the heap via ngx_alloc (caller frees).
//returns NULL on allocation failure.
static ngx_str_t * ngx_http_push_store_etag_from_message(ngx_http_push_msg_t *msg, ngx_pool_t *pool){
  ngx_str_t *etag;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  if(pool!=NULL && (etag = ngx_palloc(pool, sizeof(*etag) + NGX_INT_T_LEN))==NULL) {
    //bugfix: the original returned here with the shpool mutex still held
    ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
    return NULL;
  }
  else if(pool==NULL && (etag = ngx_alloc(sizeof(*etag) + NGX_INT_T_LEN, ngx_cycle->log))==NULL) {
    //bugfix: same mutex leak as above
    ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
    return NULL;
  }
  //string storage lives directly after the ngx_str_t header
  etag->data = (u_char *)(etag+1);
  etag->len = ngx_sprintf(etag->data,"%ui", msg->message_tag)- etag->data;
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  return etag;
}
//copy the message's content-type out of shared memory into a freshly allocated
//string. allocated from `pool` when given, otherwise from the heap via
//ngx_alloc (caller frees). returns NULL on allocation failure.
static ngx_str_t * ngx_http_push_store_content_type_from_message(ngx_http_push_msg_t *msg, ngx_pool_t *pool){
  ngx_str_t *content_type;
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  if(pool != NULL && (content_type = ngx_palloc(pool, sizeof(*content_type) + msg->content_type.len))==NULL) {
    //bugfix: the original returned here with the shpool mutex still held
    ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
    return NULL;
  }
  else if(pool == NULL && (content_type = ngx_alloc(sizeof(*content_type) + msg->content_type.len, ngx_cycle->log))==NULL) {
    //bugfix: same mutex leak as above
    ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
    return NULL;
  }
  //string storage lives directly after the ngx_str_t header
  content_type->data = (u_char *)(content_type+1);
  content_type->len = msg->content_type.len;
  ngx_memcpy(content_type->data, msg->content_type.data, content_type->len);
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  return content_type;
}
// this function adapted from push stream module. thanks Wandenberg Peixoto <wandenberg@gmail.com> and Rogério Carvalho Schneider <stockrt@gmail.com>
//collapse a (possibly multi-buffer) request body into one buffer.
//single-buffer and single-file-buffer bodies are returned as-is; otherwise a
//temp buffer of content_length_n+1 bytes is allocated from r->pool and the
//chain is copied/read into it. returns NULL on file read error.
static ngx_buf_t * ngx_http_push_request_body_to_single_buffer(ngx_http_request_t *r) {
  ngx_buf_t *buf = NULL;
  ngx_chain_t *chain;
  ssize_t n;
  off_t len;
  chain = r->request_body->bufs;
  if (chain->next == NULL) {
    //single-buffer body: nothing to assemble
    return chain->buf;
  }
  if (chain->buf->in_file) {
    if (ngx_buf_in_memory(chain->buf)) {
      ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: can't handle a buffer in a temp file and in memory ");
    }
    if (chain->next != NULL) {
      ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: error reading request body with multiple ");
    }
    //NOTE(review): the multi-buffer file case is logged but the first buffer is
    //returned anyway -- remaining buffers appear to be dropped; confirm intent.
    return chain->buf;
  }
  buf = ngx_create_temp_buf(r->pool, r->headers_in.content_length_n + 1);
  if (buf != NULL) {
    ngx_memset(buf->start, '\0', r->headers_in.content_length_n + 1);
    while ((chain != NULL) && (chain->buf != NULL)) {
      len = ngx_buf_size(chain->buf);
      // if buffer is equal to content length all the content is in this buffer
      if (len >= r->headers_in.content_length_n) {
        //rewind the assembly to the beginning and clamp to content-length
        buf->start = buf->pos;
        buf->last = buf->pos;
        len = r->headers_in.content_length_n;
      }
      if (chain->buf->in_file) {
        n = ngx_read_file(chain->buf->file, buf->start, len, 0);
        if (n == NGX_FILE_ERROR) {
          ngx_log_error(NGX_LOG_ERR, r->connection->log, 0, "push module: cannot read file with request body");
          return NULL;
        }
        buf->last = buf->last + len;
        //the temp file's contents are now in memory; discard the file
        ngx_delete_file(chain->buf->file->name.data);
        chain->buf->file->fd = NGX_INVALID_FILE;
      } else {
        buf->last = ngx_copy(buf->start, chain->buf->pos, len);
      }
      chain = chain->next;
      buf->start = buf->last; //next chunk is appended where this one ended
    }
  }
  return buf;
}
/* build a new message in shared memory from the publisher request body.
 * allocates the msg struct (with the content-type appended inline) and a
 * shared copy of the body buffer, stamps entity tags (time + per-second tag),
 * and sets expiration policy from the location config. the message starts
 * with refcount 0 and detached queue links; enqueueing is a separate step.
 * returns NULL on error (the BROADCAST_CHECK macros also notify subscribers). */
static ngx_http_push_msg_t * ngx_http_push_store_create_message(ngx_http_push_channel_t *channel, ngx_http_request_t *r) {
  ngx_buf_t                      *buf = NULL, *buf_copy;
  size_t                          content_type_len;
  ngx_http_push_loc_conf_t       *cf = ngx_http_get_module_loc_conf(r, ngx_http_push_module);
  ngx_http_push_msg_t            *msg, *previous_msg;
  //first off, we'll want to extract the body buffer
  //note: this works mostly because of r->request_body_in_single_buf = 1;
  //which, i suppose, makes this module a little slower than it could be.
  //this block is a little hacky. might be a thorn for forward-compatibility.
  if(r->headers_in.content_length_n == -1 || r->headers_in.content_length_n == 0) {
    buf = ngx_create_temp_buf(r->pool, 0);
    //this buffer will get copied to shared memory in a few lines,
    //so it does't matter what pool we make it in.
  }
  else if(r->request_body->bufs!=NULL) {
    buf = ngx_http_push_request_body_to_single_buffer(r);
  }
  else {
    ngx_log_error(NGX_LOG_ERR, (r)->connection->log, 0, "push module: unexpected publisher message request body buffer location. please report this to the push module developers.");
    return NULL;
  }
  NGX_HTTP_PUSH_BROADCAST_CHECK(buf, NULL, r, "push module: can't find or allocate publisher request body buffer");
  content_type_len = (r->headers_in.content_type!=NULL ? r->headers_in.content_type->value.len : 0);
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  //create a buffer copy in shared mem
  msg = ngx_http_push_slab_alloc_locked(sizeof(*msg) + content_type_len, "message + content_type");
  NGX_HTTP_PUSH_BROADCAST_CHECK_LOCKED(msg, NULL, r, "push module: unable to allocate message in shared memory", ngx_http_push_shpool);
  previous_msg=ngx_http_push_get_latest_message_locked(channel); //need this for entity-tags generation
  buf_copy = ngx_http_push_slab_alloc_locked(NGX_HTTP_BUF_ALLOC_SIZE(buf), "message buffer copy");
  NGX_HTTP_PUSH_BROADCAST_CHECK_LOCKED(buf_copy, NULL, r, "push module: unable to allocate buffer in shared memory", ngx_http_push_shpool) //magic nullcheck
  ngx_http_push_copy_preallocated_buffer(buf, buf_copy);
  msg->buf=buf_copy;
  //Stamp the new message with entity tags
  msg->message_time=ngx_time(); //ESSENTIAL TODO: make sure this ends up producing GMT time
  //tag disambiguates messages published within the same second
  msg->message_tag=(previous_msg!=NULL && msg->message_time == previous_msg->message_time) ? (previous_msg->message_tag + 1) : 0;
  //store the content-type
  if(content_type_len>0) {
    msg->content_type.len=r->headers_in.content_type->value.len;
    msg->content_type.data=(u_char *) (msg+1); //we had reserved a contiguous chunk, myes?
    ngx_memcpy(msg->content_type.data, r->headers_in.content_type->value.data, msg->content_type.len);
  }
  else {
    msg->content_type.len=0;
    msg->content_type.data=NULL;
  }
  //queue stuff ought to be NULL
  msg->queue.prev=NULL;
  msg->queue.next=NULL;
  msg->refcount=0;
  //set message expiration time
  time_t                          message_timeout = cf->buffer_timeout;
  msg->expires = (message_timeout==0 ? 0 : (ngx_time() + message_timeout));
  msg->delete_oldest_received_min_messages = cf->delete_oldest_received_message ? (ngx_uint_t) cf->min_messages : NGX_MAX_UINT32_VALUE;
  //NGX_MAX_UINT32_VALUE to disable, otherwise = min_message_buffer_size of the publisher location from whence the message came
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, CREATED_DBG, msg, msg->refcount, msg->queue.prev, msg->queue.next);
  return msg;
}
//append msg to the channel's message queue and trim the queue to the
//location's max_messages limit (oldest first, non-forced so reserved messages
//survive). always returns NGX_OK.
static ngx_int_t ngx_http_push_store_enqueue_message(ngx_http_push_channel_t *channel, ngx_http_push_msg_t *msg, ngx_http_push_loc_conf_t *cf) {
  ngx_shmtx_lock(&ngx_http_push_shpool->mutex);
  ngx_queue_insert_tail(&channel->message_queue->queue, &msg->queue);
  channel->messages++;
  //now see if the queue is too big
  if(channel->messages > (ngx_uint_t) cf->max_messages) {
    //exceeeds max queue size. don't force it, someone might still be using this message.
    ngx_http_push_delete_message_locked(channel, ngx_http_push_get_oldest_message_locked(channel), 0);
  }
  if(channel->messages > (ngx_uint_t) cf->min_messages) {
    //exceeeds min queue size. maybe delete the oldest message
    //no, don't do anything for now. This feature is badly implemented and I think I'll deprecate it.
  }
  ngx_shmtx_unlock(&ngx_http_push_shpool->mutex);
  //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, ENQUEUED_DBG, msg, msg->refcount, msg->queue.prev, msg->queue.next);
  return NGX_OK;
}
/* Identity callback used when the caller of publish_message passes
 * callback == NULL: simply propagates the status code unchanged. */
static ngx_int_t default_publish_callback(ngx_int_t status, ngx_http_push_channel_t *ch, ngx_http_request_t *r) {
    return status;
}
/* Publish the body of request r to the channel identified by channel_id:
 * get-or-create the channel, build a shared-memory message from the request,
 * enqueue it (max_messages > 0) or reserve it (max_messages == 0), then
 * broadcast to subscribers. The outcome is reported through `callback`
 * (NGX_ERROR on channel/message allocation failure). */
static ngx_int_t ngx_http_push_store_publish_message(ngx_str_t *channel_id, ngx_http_request_t *r, ngx_int_t (*callback)(ngx_int_t status, ngx_http_push_channel_t *ch, ngx_http_request_t *r)) {
    ngx_http_push_channel_t *channel;
    ngx_http_push_msg_t *msg;
    ngx_http_push_loc_conf_t *cf = ngx_http_get_module_loc_conf(r, ngx_http_push_module);
    ngx_int_t result=0;
    if(callback==NULL) {
        //no callback supplied: just hand the status back to our caller.
        callback=&default_publish_callback;
    }
    if((channel=ngx_http_push_store_get_channel(channel_id, cf->channel_timeout, NULL))==NULL) { //always returns a channel, unless no memory left
        return callback(NGX_ERROR, NULL, r);
        //ngx_http_finalize_request(r, NGX_HTTP_INTERNAL_SERVER_ERROR);
    }
    if((msg = ngx_http_push_store_create_message(channel, r))==NULL) {
        return callback(NGX_ERROR, channel, r);
        //ngx_http_finalize_request(r, NGX_HTTP_INTERNAL_SERVER_ERROR);
    }
    if(cf->max_messages > 0) { //channel buffers exist
        ngx_http_push_store_enqueue_message(channel, msg, cf);
    }
    else if(cf->max_messages == 0) {
        //no buffering: hold a reference so the message survives until delivery.
        ngx_http_push_store_reserve_message(NULL, msg);
    }
    result= ngx_http_push_store_publish_raw(channel, msg, 0, NULL);
    return callback(result, channel, r);
}
/* Queue an IPC message for the worker occupying `worker_slot` so that worker
 * can deliver `msg` (or a bare `status_code` when msg is NULL) to its local
 * subscribers. Returns NGX_ERROR if the shared-memory allocation fails. */
static ngx_int_t ngx_http_push_store_send_worker_message(ngx_http_push_channel_t *channel, ngx_http_push_subscriber_t *subscriber_sentinel, ngx_pid_t pid, ngx_int_t worker_slot, ngx_http_push_msg_t *msg, ngx_int_t status_code) {
    ngx_http_push_worker_msg_sentinel_t *worker_messages = ((ngx_http_push_shm_data_t *)ngx_http_push_shm_zone->data)->ipc;
    ngx_http_push_worker_msg_sentinel_t *sentinel = &worker_messages[worker_slot];
    ngx_http_push_worker_msg_t *newmessage;
    if((newmessage=ngx_http_push_slab_alloc(sizeof(*newmessage), "IPC worker message"))==NULL) {
        ngx_log_error(NGX_LOG_ERR, ngx_cycle->log, 0, "push module: unable to allocate worker message");
        return NGX_ERROR;
    }
    newmessage->msg = msg;
    newmessage->status_code = status_code;
    newmessage->pid = pid;
    newmessage->subscriber_sentinel = subscriber_sentinel;
    newmessage->channel = channel;
    //only the queue insertion itself needs the shmem lock.
    ngx_http_push_store_lock_shmem();
    ngx_queue_insert_tail(&sentinel->queue, &newmessage->queue);
    ngx_http_push_store_unlock_shmem();
    return NGX_OK;
}
/* Drain this worker's IPC queue. For each queued message addressed to the
 * current pid, respond to the local subscribers (with a channel message or a
 * bare status line). A message addressed to a different pid means a previous
 * worker died; its entry is removed from the channel's worker list. Each
 * worker message is freed after processing, and the queue sentinel is reset
 * at the end. */
static void ngx_http_push_store_receive_worker_message(void) {
    ngx_http_push_worker_msg_t *prev_worker_msg, *worker_msg;
    ngx_http_push_worker_msg_sentinel_t *sentinel;
    const ngx_str_t *status_line = NULL;
    ngx_http_push_channel_t *channel;
    ngx_http_push_subscriber_t *subscriber_sentinel;
    ngx_int_t worker_msg_pid;
    ngx_int_t status_code;
    ngx_http_push_msg_t *msg;
    sentinel = &(((ngx_http_push_shm_data_t *)ngx_http_push_shm_zone->data)->ipc)[ngx_process_slot];
    ngx_http_push_store_lock_shmem();
    worker_msg = (ngx_http_push_worker_msg_t *)ngx_queue_next(&sentinel->queue);
    ngx_http_push_store_unlock_shmem();
    while((void *)worker_msg != (void *)sentinel) {
        //copy the fields we need under the lock before using them.
        ngx_http_push_store_lock_shmem();
        worker_msg_pid = worker_msg->pid;
        ngx_http_push_store_unlock_shmem();
        if(worker_msg_pid == ngx_pid) {
            //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "process_worker_message processing proper worker_msg ");
            //everything is okay.
            ngx_http_push_store_lock_shmem();
            status_code = worker_msg->status_code;
            msg = worker_msg->msg;
            channel = worker_msg->channel;
            subscriber_sentinel = worker_msg->subscriber_sentinel;
            ngx_http_push_store_unlock_shmem();
            if(msg==NULL) {
                //just a status line, is all
                //status code only.
                switch(status_code) {
                    case NGX_HTTP_CONFLICT:
                        status_line=&NGX_HTTP_PUSH_HTTP_STATUS_409;
                        break;
                    case NGX_HTTP_GONE:
                        status_line=&NGX_HTTP_PUSH_HTTP_STATUS_410;
                        break;
                    case 0:
                        ngx_log_error(NGX_LOG_ERR, ngx_cycle->log, 0, "push module: worker message contains neither a channel message nor a status code");
                        //let's let the subscribers know that something went wrong and they might've missed a message
                        status_code = NGX_HTTP_INTERNAL_SERVER_ERROR;
                        //intentional fall-through
                    default:
                        status_line=NULL;
                }
            }
            ngx_http_push_respond_to_subscribers(channel, subscriber_sentinel, msg, status_code, status_line);
        }
        else {
            //that's quite bad you see. a previous worker died with an undelivered message.
            //but all its subscribers' connections presumably got canned, too. so it's not so bad after all.
            //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "process_worker_message processing INVALID worker_msg ");
            ngx_http_push_store_lock_shmem();
            ngx_http_push_pid_queue_t *channel_worker_sentinel = worker_msg->channel->workers_with_subscribers;
            ngx_http_push_pid_queue_t *channel_worker_cur = channel_worker_sentinel;
            ngx_log_error(NGX_LOG_ERR, ngx_cycle->log, 0, "push module: worker %i intercepted a message intended for another worker process (%i) that probably died", ngx_pid, worker_msg->pid);
            //delete that invalid sucker: drop the dead worker's pid entry.
            while((channel_worker_cur=(ngx_http_push_pid_queue_t *)ngx_queue_next(&channel_worker_cur->queue))!=channel_worker_sentinel) {
                if(channel_worker_cur->pid == worker_msg->pid) {
                    ngx_queue_remove(&channel_worker_cur->queue);
                    ngx_http_push_slab_free_locked(channel_worker_cur);
                    break;
                }
            }
            ngx_http_push_store_unlock_shmem();
        }
        //It may be worth it to memzero worker_msg for debugging purposes.
        //advance first, then free: the queue link lives inside worker_msg.
        prev_worker_msg = worker_msg;
        ngx_http_push_store_lock_shmem();
        worker_msg = (ngx_http_push_worker_msg_t *)ngx_queue_next(&worker_msg->queue);
        ngx_http_push_slab_free_locked(prev_worker_msg);
        ngx_http_push_store_unlock_shmem();
    }
    ngx_http_push_store_lock_shmem();
    ngx_queue_init(&sentinel->queue); //reset the worker message sentinel
    ngx_http_push_store_unlock_shmem();
    //ngx_log_error(NGX_LOG_WARN, ngx_cycle->log, 0, "process_worker_message finished");
    return;
}
/* Function table for the shared-memory ("memory") store backend.
 * Field order must match the ngx_http_push_store_t declaration. */
ngx_http_push_store_t ngx_http_push_store_memory = {
    //init
    &ngx_http_push_store_init_module,
    &ngx_http_push_store_init_worker,
    &ngx_http_push_store_init_postconfig,
    &ngx_http_push_store_create_main_conf,
    //shutdown
    &ngx_http_push_store_exit_worker,
    &ngx_http_push_store_exit_master,
    //async-friendly functions with callbacks
    &ngx_http_push_store_get_message, //+callback
    &ngx_http_push_store_subscribe, //+callback
    &ngx_http_push_store_publish_message, //+callback
    //channel stuff,
    &ngx_http_push_store_get_channel, //creates channel if not found, +callback
    &ngx_http_push_store_find_channel, //returns channel or NULL if not found, +callback
    &ngx_http_push_store_delete_channel,
    &ngx_http_push_store_get_channel_message,
    &ngx_http_push_store_reserve_message,
    &ngx_http_push_store_release_message,
    //channel properties
    &ngx_http_push_store_channel_subscribers,
    &ngx_http_push_store_channel_worker_subscribers,
    &ngx_http_push_store_channel_next_subscriber,
    &ngx_http_push_store_channel_release_subscriber_sentinel,
    //legacy shared-memory store helpers
    &ngx_http_push_store_lock_shmem,
    &ngx_http_push_store_unlock_shmem,
    &ngx_http_push_slab_alloc_locked,
    &ngx_http_push_slab_free_locked,
    //message stuff
    &ngx_http_push_store_create_message,
    &ngx_http_push_delete_message,
    &ngx_http_push_delete_message_locked,
    &ngx_http_push_store_enqueue_message,
    &ngx_http_push_store_etag_from_message,
    &ngx_http_push_store_content_type_from_message,
    //interprocess communication
    &ngx_http_push_store_send_worker_message,
    &ngx_http_push_store_receive_worker_message
};
|
{
"images": [],
"object": {
"uuid": "31A349AD-1715-42A8-AD42-859FEC4E5C0D",
"matrix": [1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1],
"children": [{
"type": "Mesh",
"name": "Cube",
"uuid": "FD45CB2E-764E-4812-8FA6-BC38846D0FEA",
"position": [0.0,0.0,0.0],
"rotation": [-1.5707964897155762,0.0,0.0],
"scale": [1.0,0.9999999403953552,0.9999999403953552],
"visible": true,
"castShadow": true,
"receiveShadow": true,
"geometry": "D94ACD40-385F-4EB2-949D-A6B64A2857EF"
}],
"type": "Scene"
},
"textures": [],
"metadata": {
"version": 4.3,
"sourceFile": "sofa.blend",
"generator": "io_three",
"type": "Object"
},
"materials": [],
"geometries": [{
"data": {
"metadata": {
"version": 3,
"vertices": 16,
"faces": 14,
"normals": 16,
"generator": "io_three"
},
"vertices": [3.2245755195617676,1.0,-0.006970047950744629,-3.224574565887451,1.0000003576278687,-0.006970047950744629,3.2245771884918213,0.9999995231628418,1.2020148038864136,-3.2245755195617676,1.0,1.2020148038864136,3.2245755195617676,0.0,-0.006970047950744629,-3.2245755195617676,2.384185791015625e-07,-0.006970047950744629,3.2245755195617676,-0.5432227253913879,1.2020148038864136,-3.224575996398926,-0.5432220697402954,1.2020148038864136,3.2245771884918213,0.9999995231628418,3.5758469104766846,-3.2245755195617676,1.0,3.5758469104766846,3.2245755195617676,-4.76837158203125e-07,3.5758469104766846,-3.224575996398926,1.7881393432617188e-07,3.5758469104766846,3.2245755195617676,-3.0509583950042725,-0.006970047950744629,-3.224575996398926,-3.0509583950042725,-0.006970047950744629,3.2245736122131348,-3.050959348678589,1.2020148038864136,-3.224576950073242,-3.0509583950042725,1.2020148038864136],
"faces": [33,6,14,12,4,0,1,2,3,33,5,13,15,7,4,5,6,7,33,5,7,3,1,4,7,8,9,33,2,0,1,3,10,11,9,8,33,0,4,5,1,11,3,4,9,33,7,6,10,11,7,0,12,13,33,0,2,6,4,11,10,0,3,33,7,15,14,6,7,6,1,0,33,8,9,11,10,14,15,13,12,33,6,2,8,10,0,10,14,12,33,3,7,11,9,8,7,13,15,33,2,3,9,8,10,8,15,14,33,12,14,15,13,2,1,6,5,33,4,12,13,5,3,2,5,4],
"normals": [0.8771324753761292,-0.29929500818252563,0.37553027272224426,0.5773491859436035,-0.5773491859436035,0.5773491859436035,0.5773491859436035,-0.5773491859436035,-0.5773491859436035,0.7070833444595337,0.0,-0.7070833444595337,-0.7070833444595337,0.0,-0.7070833444595337,-0.5773491859436035,-0.5773491859436035,-0.5773491859436035,-0.5773491859436035,-0.5773491859436035,0.5773491859436035,-0.8771324753761292,-0.29929500818252563,0.37553027272224426,-0.7070833444595337,0.7070833444595337,0.0,-0.5773491859436035,0.5773491859436035,-0.5773491859436035,0.7070833444595337,0.7070833444595337,0.0,0.5773491859436035,0.5773491859436035,-0.5773491859436035,0.5901058912277222,-0.503158688545227,0.6313058733940125,-0.5901058912277222,-0.503158688545227,0.6313058733940125,0.5773491859436035,0.5773491859436035,0.5773491859436035,-0.5773491859436035,0.5773491859436035,0.5773491859436035],
"name": "CubeGeometry.3"
},
"uuid": "D94ACD40-385F-4EB2-949D-A6B64A2857EF",
"type": "Geometry"
}]
}
|
#!/usr/bin/env python
import time
class bcolors:
    """ANSI terminal escape sequences for colored/styled console output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
# Log levels; `level` holds the module-wide current level (set via logConfig).
# (Stray statement-terminating semicolons removed -- not idiomatic Python.)
INFO = 1
WARNING = 2

level = INFO


def logConfig(l=INFO):
    """Set the module-wide log level.

    Parameters:
        l: one of INFO or WARNING (default INFO).
    """
    global level
    level = l
def info(s, identify='INFO'):
    """Print *s* as a timestamped INFO line (only when level == INFO)."""
    # Parenthesized single-argument print works under both Python 2 and 3;
    # the original `print ...` statement is a SyntaxError on Python 3.
    if level == INFO:
        print('[%s %s %s][%s %s %s] %s' % (bcolors.OKBLUE, identify, bcolors.ENDC, bcolors.HEADER, time.strftime('%H:%M'), bcolors.ENDC, s))
def warning(s, identify='WARNING'):
    """Print *s* as a timestamped WARNING line (only when level == WARNING)."""
    # NOTE(review): uses OKGREEN, not bcolors.WARNING, for the label color --
    # preserved as-is; confirm the color choice is intentional.
    # Parenthesized print keeps this valid under both Python 2 and 3.
    if level == WARNING:
        print('[%s %s %s][%s %s %s] %s' % (bcolors.OKGREEN, identify, bcolors.ENDC, bcolors.HEADER, time.strftime('%H:%M'), bcolors.ENDC, s))
if __name__ == '__main__':
    # Smoke test: emit one line at each log level.
    logConfig(l=INFO)
    info('it is info')
    logConfig(WARNING)
    warning('it is warning')
|
/*
* @Author: your name
* @Date: 2021-02-01 11:45:33
* @LastEditTime: 2021-04-28 18:12:25
* @LastEditors: Please set LastEditors
* @Description: In User Settings Edit
* @FilePath: \vue-admin-template\src\settings.js
*/
// Application-wide settings for the admin template.
module.exports = {
  title: 'Vue Admin Template',
  /**
   * @type {boolean} true | false
   * @description Whether fix the header
   */
  fixedHeader: false,
  // Prefix prepended to the token in the Authorization header.
  authorizationValue: 'Bearer ',
  // NOTE(review): hard-coded encryption key committed to source control --
  // consider moving this secret into environment configuration.
  encryptPK: 'cBssbHB3ZA==HKXT',
  /**
   * @type {boolean} true | false
   * @description Whether show the logo in sidebar
   */
  sidebarLogo: false
}
|
// Region
// ------
// Manage the visual regions of your composite application. See
// http://lostechies.com/derickbailey/2011/12/12/composite-js-apps-regions-and-region-managers/
// Region constructor: mixes an EventBinder and the supplied options into the
// new instance, requires an `el` to be present, and runs an optional custom
// `initialize` with the original arguments.
Marionette.Region = function(options){
  this.options = options || {};
  _.extend(this, new Marionette.EventBinder(), options);
  if (!this.el) {
    var noElError = new Error("An 'el' must be specified");
    noElError.name = "NoElError";
    throw noElError;
  }
  if (this.initialize) {
    this.initialize.apply(this, arguments);
  }
};
_.extend(Marionette.Region.prototype, Backbone.Events, {
  // Displays a backbone view instance inside of the region.
  // Handles calling the `render` method for you. Reads content
  // directly from the `el` attribute. Also calls an optional
  // `onShow` and `close` method on your view, just after showing
  // or just before closing the view, respectively.
  show: function(view){
    this.ensureEl();
    // Close whatever is currently displayed before rendering the new view.
    this.close();
    view.render();
    this.open(view);
    if (view.onShow) { view.onShow(); }
    view.trigger("show");
    if (this.onShow) { this.onShow(view); }
    this.trigger("view:show", view);
    this.currentView = view;
  },
  // Lazily resolve `this.$el` from the configured `el` selector.
  ensureEl: function(){
    if (!this.$el || this.$el.length === 0){
      this.$el = this.getEl(this.el);
    }
  },
  // Override this method to change how the region finds the
  // DOM element that it manages. Return a jQuery selector object.
  getEl: function(selector){
    return $(selector);
  },
  // Override this method to change how the new view is
  // appended to the `$el` that the region is managing
  open: function(view){
    this.$el.html(view.el);
  },
  // Close the current view, if there is one. If there is no
  // current view, it does nothing and returns immediately.
  close: function(){
    var view = this.currentView;
    if (!view){ return; }
    if (view.close) { view.close(); }
    this.trigger("view:closed", view);
    delete this.currentView;
  },
  // Attach an existing view to the region. This
  // will not call `render` or `onShow` for the new view,
  // and will not replace the current HTML for the `el`
  // of the region.
  attachView: function(view){
    this.currentView = view;
  }
});
// Copy the `extend` function used by Backbone's classes
Marionette.Region.extend = Backbone.View.extend;
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django entry point: select the settings module (unless already
    # set in the environment), then hand the CLI arguments to Django's
    # management-command dispatcher.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "snippetsjava.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
// Auto-generated Nuxt.js client hydration payload (minified) -- do not edit by hand.
window.__NUXT__=(function(a,b,c,d,e){return {staticAssetsBase:"https:\u002F\u002Fwww.baca-quran.id\u002Fstatic\u002F1627814429",layout:"default",error:b,state:{notification:{show:a,title:c,message:c},isShowSidebar:a,isSupportWebShare:a,headerTitle:"Baca Qur'an",page:"home",lastReadVerse:b,settingActiveTheme:{name:"dark",bgColor:"#071e3d",fgColor:"#fff"},settingShowTranslation:a,settingShowTafsir:a,settingShowMuqaddimah:d,surahFavorite:[]},serverRendered:d,routePath:"\u002Famp\u002F21\u002F95",config:{_app:{basePath:e,assetsPath:e,cdnURL:"https:\u002F\u002Fwww.baca-quran.id\u002F"}}}}(false,null,"",true,"\u002F"));
|
#!/usr/bin/env python3
from .._utils.attribution import LayerAttribution
from .._utils.common import _format_input, _format_additional_forward_args
from .._utils.gradient import compute_layer_gradients_and_eval
class LayerGradientXActivation(LayerAttribution):
    """Layer attribution method that computes the element-wise product of the
    gradient of the target output with respect to a chosen layer and that
    layer's activation, for each input."""

    def __init__(self, forward_func, layer, device_ids=None):
        r"""
        Args:

            forward_func (callable): The forward function of the model or any
                        modification of it
            layer (torch.nn.Module): Layer for which attributions are computed.
                        Output size of attribute matches this layer's input or
                        output dimensions, depending on whether we attribute to
                        the inputs or outputs of the layer, corresponding to
                        attribution of each neuron in the input or output of
                        this layer.
                        Currently, it is assumed that the inputs or the outputs
                        of the layer, depending on which one is used for
                        attribution, can only be a single tensor.
            device_ids (list(int)): Device ID list, necessary only if forward_func
                        applies a DataParallel model. This allows reconstruction of
                        intermediate outputs from batched results across devices.
                        If forward_func is given as the DataParallel model itself,
                        then it is not necessary to provide this argument.
        """
        super().__init__(forward_func, layer, device_ids)

    def attribute(
        self,
        inputs,
        target=None,
        additional_forward_args=None,
        attribute_to_layer_input=False,
    ):
        r"""
        Computes element-wise product of gradient and activation for selected
        layer on given inputs.

        Args:

            inputs (tensor or tuple of tensors): Input for which attributions
                        are computed. If forward_func takes a single
                        tensor as input, a single input tensor should be provided.
                        If forward_func takes multiple tensors as input, a tuple
                        of the input tensors should be provided. It is assumed
                        that for all given input tensors, dimension 0 corresponds
                        to the number of examples, and if multiple input tensors
                        are provided, the examples must be aligned appropriately.
            target (int, tuple, tensor or list, optional): Output indices for
                        which gradients are computed (for classification cases,
                        this is usually the target class).
                        If the network returns a scalar value per example,
                        no target index is necessary.
                        For general 2D outputs, targets can be either:

                        - a single integer or a tensor containing a single
                          integer, which is applied to all input examples
                        - a list of integers or a 1D tensor, with length matching
                          the number of examples in inputs (dim 0). Each integer
                          is applied as the target for the corresponding example.

                        For outputs with > 2 dimensions, targets can be either:

                        - A single tuple, which contains #output_dims - 1
                          elements. This target index is applied to all examples.
                        - A list of tuples with length equal to the number of
                          examples in inputs (dim 0), and each tuple containing
                          #output_dims - 1 elements. Each tuple is applied as the
                          target for the corresponding example.

                        Default: None
            additional_forward_args (tuple, optional): If the forward function
                        requires additional arguments other than the inputs for
                        which attributions should not be computed, this argument
                        can be provided. It must be either a single additional
                        argument of a Tensor or arbitrary (non-tuple) type or a
                        tuple containing multiple additional arguments including
                        tensors or any arbitrary python types. These arguments
                        are provided to forward_func in order following the
                        arguments in inputs.
                        Note that attributions are not computed with respect
                        to these arguments.
                        Default: None
            attribute_to_layer_input (bool, optional): Indicates whether to
                        compute the attribution with respect to the layer input
                        or output. If `attribute_to_layer_input` is set to True
                        then the attributions will be computed with respect to
                        layer input, otherwise it will be computed with respect
                        to layer output.
                        Note that currently it is assumed that either the input
                        or the output of internal layer, depending on whether we
                        attribute to the input or output, is a single tensor.
                        Support for multiple tensors will be added later.
                        Default: False

        Returns:
            *tensor* of **attributions**:
            - **attributions** (*tensor*):
                        Product of gradient and activation for each
                        neuron in given layer output.
                        Attributions will always be the same size as the
                        output of the given layer.

        Examples::

            >>> # ImageClassifier takes a single input tensor of images Nx3x32x32,
            >>> # and returns an Nx10 tensor of class probabilities.
            >>> # It contains an attribute conv1, which is an instance of nn.conv2d,
            >>> # and the output of this layer has dimensions Nx12x32x32.
            >>> net = ImageClassifier()
            >>> layer_ga = LayerGradientXActivation(net, net.conv1)
            >>> input = torch.randn(2, 3, 32, 32, requires_grad=True)
            >>> # Computes layer activation x gradient for class 3.
            >>> # attribution size matches layer output, Nx12x32x32
            >>> attribution = layer_ga.attribute(input, 3)
        """
        # Normalize inputs/args to the tuple forms the gradient utils expect.
        inputs = _format_input(inputs)
        additional_forward_args = _format_additional_forward_args(
            additional_forward_args
        )
        # Returns gradient of output with respect to
        # hidden layer and hidden layer evaluated at each input.
        layer_gradients, layer_eval = compute_layer_gradients_and_eval(
            self.forward_func,
            self.layer,
            inputs,
            target,
            additional_forward_args,
            device_ids=self.device_ids,
            attribute_to_layer_input=attribute_to_layer_input,
        )
        # Element-wise gradient x activation.
        return layer_gradients * layer_eval
|
// Annotation input widget: lets a user pick tags and type a comment, and
// persists the selection as a JSON string in a hidden `input.value` field.
(function() {
  var debug = false;
  var module = {
    debug: debug,
    inputSelector: '.annotation-input',
    tagSelector: '.tag',
    tagsSelector: '.tags',
    commentSelector: 'textarea.comment',
    valueSelector: 'input.value', // stash tag selections and comment here as a JSON string...
    singleSelect: true,
    // Wire click/change handlers once per annotation-input element.
    init: function() {
      var that = this;
      if (this.debug) { console.log('annotation input loaded: '); }
      $(this.inputSelector).each(function(index, el) {
        if (!$(el).data('listening')) {
          $(el).delegate(that.tagSelector, 'click', $.proxy(that.onClickTag, that));
          $(el).delegate(that.commentSelector, 'change', $.proxy(that.onChangeComment, that));
          $(el).data('listening', 'yes');
        }
      });
    },
    // Persist the comment text into the stored JSON value.
    onChangeComment: function(e) {
      var value_el = this.findValueEl(e.target);
      var current_value = this.loadValue(value_el);
      var target_value = $(e.target).val();
      current_value.comment = target_value;
      this.storeValue(value_el, current_value);
    },
    // Toggle a tag selection; in single-select mode the new tag replaces
    // any previous selection.
    onClickTag: function(e) {
      var target_el = e.target,
          target_value, target_index;
      var value_el, current_value;
      value_el = this.findValueEl(e.target);
      current_value = this.loadValue(value_el);
      target_value = $(e.target).data('id');
      if (!$(target_el).hasClass('selected')) {
        if (this.singleSelect) {
          current_value.options = [target_value];
        } else {
          current_value.options.push(target_value);
        }
      } else {
        if (this.singleSelect) {
          current_value.options = [];
        } else {
          target_index = current_value.options.indexOf(target_value);
          if (target_index !== -1) {
            current_value.options.splice(target_index, 1);
          }
        }
      }
      this.storeValue(value_el, current_value);
      if (this.singleSelect) {
        $(target_el).closest(this.tagsSelector)
            .find(this.tagSelector)
            .not(target_el)
            .removeClass('selected');
      }
      $(target_el).toggleClass('selected');
    },
    // Locate the hidden value field belonging to the same annotation input.
    findValueEl: function(target_el) {
      var input_el = $(target_el).closest(this.inputSelector);
      return $(this.valueSelector, input_el);
    },
    // Parse the stored JSON value, defaulting missing pieces.
    loadValue: function(value_el) {
      var json = $(value_el).val();
      var result = null;
      // FIX: JSON.parse throws on an empty or malformed field (e.g. a fresh
      // form), which previously broke the click/change handlers. Treat any
      // unparsable value the same as a stored null.
      try {
        result = JSON.parse(json);
      } catch (err) {
        result = null;
      }
      if (result === null) {
        result = {};
      }
      if (!result.hasOwnProperty('options')) {
        result.options = [];
      }
      if (!result.hasOwnProperty('comment')) {
        result.comment = '';
      }
      return result;
    },
    // Serialize and stash the value object back into the hidden field.
    storeValue: function(value_el, new_value) {
      var json = JSON.stringify(new_value);
      $(value_el).val(json);
    }
  };
  module.init();
}).call(this);
|
/*
*
* Copyright (c) 2021 Project CHIP Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Import helpers from zap core
const zapPath = '../../../../../third_party/zap/repo/dist/src-electron/';
const templateUtil = require(zapPath + 'generator/template-util.js')
const zclHelper = require(zapPath + 'generator/helper-zcl.js')
const ChipTypesHelper = require('../../../../../src/app/zap-templates/common/ChipTypesHelper.js');
const StringHelper = require('../../../../../src/app/zap-templates/common/StringHelper.js');
// Ideally those clusters' endpoints should be retrieved from the
// descriptor cluster.
// Returns the endpoint a cluster is expected to live on: 0 for the clusters
// listed below (root-node clusters), 1 for everything else.
function asExpectedEndpointForCluster(clusterName)
{
  const rootEndpointClusters = [
    'AdministratorCommissioning',
    'Basic',
    'Descriptor',
    'GeneralCommissioning',
    'GeneralDiagnostics',
    'SoftwareDiagnostics',
    'ThreadNetworkDiagnostics',
    'EthernetNetworkDiagnostics',
    'WiFiNetworkDiagnostics',
    'GroupKeyManagement',
    'NetworkCommissioning',
    'OperationalCredentials',
    'TrustedRootCertificates',
    'OtaSoftwareUpdateProvider',
    'OtaSoftwareUpdateRequestor',
  ];
  return rootEndpointClusters.includes(clusterName) ? 0 : 1;
}
// Handlebars helper (bound `this` is the attribute/command context): produce
// a placeholder test value matching this.type.
function asTestValue()
{
  if (StringHelper.isOctetString(this.type)) {
    return '[@"Test" dataUsingEncoding:NSUTF8StringEncoding]';
  }
  if (StringHelper.isCharString(this.type)) {
    return '@"Test"';
  }
  // Numeric case: prefer a declared bound, falling back to 0.
  return this.min || this.max || 0;
}
// Map a ZCL type to the Objective-C type used in generated code; string
// types map to Foundation classes, everything else falls back to the basic
// C type derived from the bound context's chipType.
function asObjectiveCBasicType(type)
{
  if (StringHelper.isOctetString(type)) {
    return 'NSData *';
  }
  if (StringHelper.isCharString(type)) {
    return 'NSString *';
  }
  return ChipTypesHelper.asBasicType(this.chipType);
}
// Resolve the NSNumber accessor suffix ("UnsignedChar", "Long", ...) for a
// ZCL type, optionally lower-casing the first letter. Returns a template
// promise; unhandled types are thrown (and logged by the catch below).
function asObjectiveCNumberType(label, type, asLowerCased)
{
  function fn(pkgId)
  {
    const options = { 'hash' : {} };
    return zclHelper.asUnderlyingZclType.call(this, type, options)
        .then(zclType => {
          const basicType = ChipTypesHelper.asBasicType(zclType);
          switch (basicType) {
          case 'bool':
            return 'Bool';
          case 'uint8_t':
            return 'UnsignedChar';
          case 'uint16_t':
            return 'UnsignedShort';
          case 'uint32_t':
            return 'UnsignedLong';
          case 'uint64_t':
            return 'UnsignedLongLong';
          case 'int8_t':
            return 'Char';
          case 'int16_t':
            return 'Short';
          case 'int32_t':
            return 'Long';
          case 'int64_t':
            return 'LongLong';
          default: {
            // FIX: `error` was previously assigned without declaration,
            // leaking a global (and a ReferenceError in strict mode).
            const error = label + ': Unhandled underlying type ' + zclType + ' for original type ' + type;
            throw error;
          }
          }
        })
        .then(typeName => asLowerCased ? (typeName[0].toLowerCase() + typeName.substring(1)) : typeName);
  }
  const promise = templateUtil.ensureZclPackageId(this).then(fn.bind(this)).catch(err => console.log(err));
  return templateUtil.templatePromise(this.global, promise);
}
// Format a numeric test index as a fixed-width, zero-padded 6-character
// string (e.g. 7 -> "000007").
function asTestIndex(index)
{
  // FIX: padStart expects a string pad; the previous numeric 0 only worked
  // through implicit coercion.
  return index.toString().padStart(6, '0');
}
//
// Module exports
//
// Helpers consumed by the ZAP code-generation templates.
exports.asObjectiveCBasicType = asObjectiveCBasicType;
exports.asObjectiveCNumberType = asObjectiveCNumberType;
exports.asExpectedEndpointForCluster = asExpectedEndpointForCluster;
exports.asTestIndex = asTestIndex;
exports.asTestValue = asTestValue;
|
import chess
import numpy as np
import argparse
import random
import os
import convert_board as convert
def generate(raw_data, max_size, len_history=8, shuffle=False):
    """Yield (history, (next_move, result)) training examples parsed from a
    raw match dump.

    Parameters:
        raw_data: whole raw file contents; a 5-line header, then one match
            per line of the form "... <result> ... ###1.e4 1.e5 2.Nf3 ...".
        max_size: maximum number of examples to yield.
        len_history: number of FEN strings kept in the sliding history.
        shuffle: unused beyond the unconditional match shuffle below; kept
            for interface compatibility.

    Raises:
        Exception: when a move in the dump cannot be parsed.
    """
    raw_data = raw_data.split("\n")[5:]  # skip the 5 header lines
    random.shuffle(raw_data)
    size = 0
    for match in raw_data:
        if size >= max_size:
            break
        result = match.split(" ")[2]
        moves = match.split("###")[1].split(" ")
        moves = list(filter(None, moves))
        board = chess.Board()
        history = []
        for idx, move in enumerate(moves):
            if size >= max_size:
                break
            size += 1
            raw_move = move.split(".")[1]
            try:
                board.push_san(raw_move.strip())
                fen = board.fen()
                # FIX: was `idx is not len(moves) - 1`. Identity comparison on
                # ints is only reliable within CPython's small-int cache
                # (<= 256), so games longer than ~256 plies misfired here.
                if idx != len(moves) - 1:
                    next_move = board.parse_san(moves[idx + 1].split(".")[1])
            except Exception:
                raise Exception("invalid data point after %d examples" % size)
            if len(history) == len_history:
                history.pop(0)
            history.append(fen)
            # NOTE(review): on a game's final move `next_move` still holds the
            # previous iteration's value -- TODO confirm this is intended.
            yield history, (str(next_move), result)
def format_number(num):
    """Return *num* as a short human-readable string using K/M/G/T/P suffixes
    (e.g. 2500 -> '2.5K'), keeping three significant digits."""
    suffixes = ['', 'K', 'M', 'G', 'T', 'P']
    value = float('{:.3g}'.format(num))
    magnitude = 0
    # Scale down by thousands until the value fits.
    while abs(value) > 1000:
        magnitude += 1
        value /= 1000.0
    digits = '{:f}'.format(value).rstrip('0').rstrip('.')
    return '{}{}'.format(digits, suffixes[magnitude])
def write_files(data_gen, path, size, is_testset=False):
    """Append examples from *data_gen* to the data/labels files.

    Parameters:
        data_gen: iterable of (data, label) where data is a list of FEN
            strings and label is a (move, result) pair.
        path: filename prefix for the output files.
        size: nominal dataset size, used in the output file names.
        is_testset: write test_* files at a quarter of the nominal size.
    """
    trainfilename = "%strain_data_%s" if not is_testset else "%stest_data_%s"
    labelsfilename = "%strain_labels_%s" if not is_testset else "%stest_labels_%s"
    size = int(round(0.25 * size)) if is_testset else size
    # FIX: `with` guarantees both handles are flushed and closed even if
    # data_gen raises mid-iteration (the originals leaked on error).
    with open(trainfilename % (path, format_number(size)), 'a') as datafile, \
            open(labelsfilename % (path, format_number(size)), 'a') as labelsfile:
        for data, label in data_gen:
            # One comma-separated line of FENs per example.
            for idx, fen in enumerate(data):
                if idx < len(data) - 1:
                    datafile.write("%s," % fen)
                else:
                    datafile.write("%s\n" % fen)
            labelsfile.write("%s\n" % "%s,%s" % (label[0], label[1]))
def empty_file(path):
    """Truncate the file at *path* to zero bytes (creating it if missing)."""
    with open(path, "w"):
        pass
def shuffle(paths):
    """Shuffle each (data_file, labels_file) pair in place, keeping data and
    label lines aligned under the same permutation.

    Parameters:
        paths: list of (data_path, labels_path) tuples.
    """
    assert isinstance(paths, list)
    assert isinstance(paths[0], tuple)
    for path in paths:
        # FIX: use `with` so the read handles are closed (the original left
        # them open while reopening the same files for writing).
        with open(path[0], "r") as datafile:
            data = datafile.readlines()
        with open(path[1], "r") as labelsfile:
            labels = labelsfile.readlines()
        # Shuffle both sequences with the same permutation.
        paired = list(zip(data, labels))
        random.shuffle(paired)
        data, labels = zip(*paired)
        # "w" truncates on open, so the original's extra truncation pass
        # (open(...).close()) was redundant and has been dropped.
        with open(path[0], "w") as datafile:
            datafile.writelines("%s" % fen for fen in data)
        with open(path[1], "w") as labelsfile:
            labelsfile.writelines("%s" % label for label in labels)
if __name__ == "__main__":
    # CLI entry point: parse options, generate the training (and optionally
    # test) set from data/raw_data.txt, then optionally shuffle the outputs.
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--size', required=True, type=int)
    parser.add_argument('-p', '--path', required=True)
    # FIX: default=8 matches generate()'s history length. Previously the
    # argparse default of None was passed through, overriding generate()'s
    # own default and disabling history trimming (len(history) == None is
    # never true).
    parser.add_argument('-lh', '--len_history', type=int, default=8)
    parser.add_argument('-gt', '--generate_testset', action='store_true')
    parser.add_argument('-sh', '--shuffle', action='store_true')
    args = parser.parse_args()
    size = args.size
    path = args.path
    len_history = args.len_history
    generate_testset = args.generate_testset
    bool_shuffle = args.shuffle
    # Read the raw match dump once; `with` closes the handle promptly
    # (the original left it to GC on some paths).
    with open("data/raw_data.txt", "r") as f:
        raw_data = f.read()
    data_gen = generate(raw_data, size, len_history=len_history, shuffle=bool_shuffle)
    write_files(data_gen, path, size)
    if generate_testset:
        data_gen = generate(raw_data, size * 0.25, len_history=len_history, shuffle=bool_shuffle)
        write_files(data_gen, path, size, is_testset=True)
    if bool_shuffle:
        trainset = ("%strain_data_%s" % (path, format_number(size)), "%strain_labels_%s" % (path, format_number(size)))
        if generate_testset:
            # NOTE(review): int(size * 0.25) truncates while write_files uses
            # int(round(0.25 * size)); the names can disagree for sizes not
            # divisible by 4 -- TODO confirm and unify.
            testset = ("%stest_data_%s" % (path, format_number(int(size * 0.25))), "%stest_labels_%s" % (path, format_number(int(size * 0.25))))
            shuffle([trainset, testset])
        else:
            shuffle([trainset])
|
import React, { Component } from 'react';
import { View, StyleSheet } from 'react-native';
import { Chess } from 'chess.js';
import ChessBoard from '../lib';
// Lichess HTTP API and WebSocket endpoints.
const HTTP_BASE_URL = 'https://en.lichess.org';
const SOCKET_BASE_URL = 'wss://socket.lichess.org';
export default class PlayerVsPlayer extends Component {
constructor(props) {
super(props);
this.state = {
game: new Chess(),
};
}
componentDidMount() {
// get lila token
fetch(`${HTTP_BASE_URL}/account/info`).then(() => {
this.clientId = Math.random().toString(36).substring(2);
this.createGame();
});
}
createGame() {
fetch(`${HTTP_BASE_URL}/setup/friend`, {
method: 'POST',
headers: {
Accept: 'application/vnd.lichess.v2+json',
'Content-Type': 'application/json',
},
body: JSON.stringify({
variant: '1',
timeMode: '1',
days: '2',
time: '10',
increment: '0',
color: 'white',
mode: '0',
}),
})
.then(res => res.json())
.then(res => {
const socketId = res.challenge.id;
const socketUrl = `${SOCKET_BASE_URL}/challenge/${socketId}/socket/v2?sri=${this.clientId}&mobile=1`;
this.createSocket(socketUrl, socketId);
});
}
createSocket = (socketUrl, socketId) => {
console.log('socket: ' + socketUrl);
this.ws = new WebSocket(socketUrl);
this.ws.onmessage = e => {
// a message was received
console.log(`received: ${e.data}`);
const data = JSON.parse(e.data);
if (data.t === 'reload' && data.v === 3 && !this.gameFetched) {
this.gameFetched = true;
// this sets cookie
fetch(`${HTTP_BASE_URL}/challenge/${socketId}`).then(() => {
fetch(`${HTTP_BASE_URL}/${socketId}`, {
headers: {
Accept: 'application/vnd.lichess.v2+json',
'Content-Type': 'application/json',
},
})
.then(res => res.json())
.then(res => {
if (res.url && res.url.socket) {
const socketUrl = `${SOCKET_BASE_URL}${res.url.socket}?sri=${this.clientId}&mobile=1`;
clearInterval(this.intervalId);
this.createSocket(socketUrl);
}
});
});
}
let uci;
if (data.t === 'move' && data.d.ply % 2 === 0) {
uci = data.d.uci;
} else if (data.t === 'b') {
const first = data.d[0];
if (first && first.d.status && first.d.status.name === 'mate') {
uci = first.d.uci;
}
}
if (uci) {
const from = uci.substring(0, 2);
const to = uci.substring(2, 4);
this.board.movePiece(to, from);
}
};
this.ws.onerror = e => {
// an error occurred
console.log(e.message);
};
this.ws.onclose = e => {
console.log(e.code, e.reason);
};
this.ws.onopen = () => {
console.log('ws open');
// ping every second
this.intervalId = setInterval(
() => {
this.sendMessage({ t: 'p', v: 2 });
},
1000,
);
};
};
sendMessage(obj) {
const str = JSON.stringify(obj);
console.log(`sending: ${str}`);
this.ws.send(str);
}
onMove = ({ from, to }) => {
const { game } = this.state;
game.move({
from,
to,
});
if (game.turn() === 'b') {
this.sendMessage({
t: 'move',
d: {
from,
to,
},
});
}
};
shouldSelectPiece = piece => {
const { game } = this.state;
const turn = game.turn();
if (
game.in_checkmate() === true ||
game.in_draw() === true ||
turn !== 'w' ||
piece.color !== 'w'
) {
return false;
}
return true;
};
  render() {
    const { fen } = this.state;
    // Controlled board: position comes from state `fen`; callbacks gate
    // which pieces may be selected and relay the player's moves.
    return (
      <View style={styles.container}>
        <ChessBoard
          ref={board => this.board = board}
          fen={fen}
          size={340}
          shouldSelectPiece={this.shouldSelectPiece}
          onMove={this.onMove}
        />
      </View>
    );
  }
}
// Center the board on a light-grey, full-screen background.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    padding: 16,
    backgroundColor: '#EEEEEE',
  },
});
|
class User:
    """More or less just a container to hold information on the viewer who sent a message.

    Parameters
    -----------
    name : str
        The username of the viewer.
    uid : str
        The user ID of the viewer.
    broadcaster : bool
        True if this viewer is the broadcaster (streamer).
    moderator : bool
        True if this viewer is a moderator of the channel.
    subscriber : bool
        True if this viewer is a subscriber of the channel.
    sub_length : int
        If the viewer is a subscriber, how long in months they've been subscribed.
        If they are not a subscriber, this will be 0.
    badges : list of str
        A list of all the chat badges the viewer has.

    Attributes
    ------------
    See Parameters

    Note
    ------------
    Does not keep track of follower status because that requires an API call.
    """
    def __init__(self, name: str, uid: str, broadcaster: bool, moderator: bool,
                 subscriber: bool, sub_length: int, badges: list):
        # variables given
        self.name = name
        self.id = uid
        self.broadcaster = broadcaster
        self.moderator = moderator
        self.subscriber = subscriber
        self.sub_length = sub_length
        self.badges = badges

    ###################### GETTER FUNCTIONS ######################
    def get_name(self) -> str:
        return self.name

    def get_id(self) -> str:
        # BUG FIX: previously returned self.name instead of the user id.
        return self.id

    def is_broadcaster(self) -> bool:
        return self.broadcaster

    def is_mod(self) -> bool:
        return self.moderator

    def is_sub(self) -> bool:
        return self.subscriber

    def get_sub_length(self) -> int:
        return self.sub_length

    def get_badges(self) -> list:
        return self.badges
|
// Grunt svgmin task config: minify every .svg under the source images
// directory into a temporary folder inside dist.
module.exports = {
  all: {
    expand: true,
    cwd: "<%= paths.src.images %>",
    src: ["**/*.svg"],
    dest: "<%= paths.dist %>/svgmin.tmp"
  }
}
|
// Copyright 2020 Las Venturas Playground. All rights reserved.
// Use of this source code is governed by the MIT license, a copy of which can
// be found in the LICENSE file.
// @ts-check
import { Color } from 'base/color.js';
import { Rect } from 'base/rect.js';
import { ZoneAreaManager } from 'features/gang_zones/zone_area_manager.js';
import { ZoneGang } from 'features/gang_zones/structures/zone_gang.js';
import { Zone } from 'features/gang_zones/structures/zone.js';
describe('ZoneAreaManager', (it, beforeEach, afterEach) => {
    // Fake class representing the same interface as ZoneNatives, to mock out what the manager would
    // have done on the SA-MP server to create the gang zones.
    class FakeZoneNatives {
        zoneId_ = 1;
        zones = new Map();
        showForPlayerCalls = 0;
        // Records the zone and hands out a fresh monotonically-increasing id.
        createZone(area, color) {
            this.zones.set(this.zoneId_, { area, color });
            return this.zoneId_++;
        }
        // Only counts invocations; display itself is server-side behaviour.
        showZoneForPlayer(player, zoneId, color) {
            this.showForPlayerCalls++;
        }
        deleteZone(zoneId) {
            this.zones.delete(zoneId);
        }
    }
    /**
     * @type ZoneAreaManager
     */
    let manager = null;
    /**
     * @type FakeZoneNatives
     */
    let natives = null;
    beforeEach(() => {
        natives = new FakeZoneNatives();
        manager = new ZoneAreaManager(null, natives);
    });
    afterEach(() => manager.dispose());
    it('should be able to create and delete zones on the server', assert => {
        const zoneGang = new ZoneGang(/* id= */ 9001);
        zoneGang.initialize({ id: 9001, color: null, name: 'BA Hooligans' });
        const zone = new Zone(zoneGang, {
            area: new Rect(40, 50, 150, 150),
        });
        assert.equal(natives.zones.size, 0);
        manager.createZone(zone);
        assert.equal(natives.zones.size, 1);
        // The created zone must carry the rectangle the Zone was given.
        for (const createdZone of natives.zones.values()) {
            assert.equal(createdZone.area.minX, 40);
            assert.equal(createdZone.area.minY, 50);
        }
        assert.equal(natives.showForPlayerCalls, 0);
        // Spawning a player should make the manager show each zone to them.
        dispatchEvent('playerspawn', {
            playerid: server.playerManager.getById(/* Gunther= */ 0).id,
        });
        assert.equal(natives.showForPlayerCalls, 1);
        manager.deleteZone(zone);
        assert.equal(natives.zones.size, 0);
    });
    it('should force a configured alpha channel on all gang zones', assert => {
        const zoneGang = new ZoneGang(/* id= */ 9001);
        // Gang colour supplied with a fully-opaque (0xFF) alpha on purpose.
        zoneGang.initialize({ id: 9001, color: Color.fromHex('FFFFFFFF'), name: 'BA Hooligans' });
        const zone = new Zone(zoneGang, {
            area: new Rect(40, 50, 150, 150),
        });
        manager.createZone(zone);
        assert.equal(natives.zones.size, 1);
        // The manager must have replaced the supplied alpha with its own.
        for (const createdZone of natives.zones.values())
            assert.notEqual(createdZone.color.a, 255);
    });
});
|
import React from 'react';
/* eslint-disable import/no-extraneous-dependencies */
import { storiesOf } from '@storybook/react';
import { withInfo } from '@storybook/addon-info';
import { Slider } from '../src';
const displayName = Slider.displayName || 'Slider';
const title = 'Simple usage';
const description = `
This is some basic usage with the slider component as built for Raise Effect, updated with changes for latest rc-slider. Due to the nature of the component, a higher order component is required to contain local state.
Sliding should trigger changes that affect the local state.`;
const demoCode = () => {
  // Stateful wrapper: rc-slider is a controlled component, so a small HOC
  // holds the value locally and feeds changes back in via onChange.
  class CustomSlider extends React.Component {
    constructor(props) {
      super(props);
      this.state = {
        value: 0,
      };
      this.handleChange = this.handleChange.bind(this);
    }
    handleChange(value) {
      this.setState({ value });
    }
    render() {
      return (
        <div className="slider-container">
          <Slider
            min={0}
            max={100}
            value={this.state.value}
            onChange={this.handleChange}
          />
        </div>
      );
    }
  }
  // NOTE - ONLY return block will be documented as src - find out how to show above code
  return (<CustomSlider />);
};
// const propDocs = { inline: true, propTables: [Slider] };
// Register the demo story under the component's display name in Storybook.
export default () => storiesOf(displayName, module)
  .add(title, demoCode);
|
"""photo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
# Route the Django admin and delegate everything else to the gallery app.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('gallery.urls')),
]
|
#!/usr/bin/python
# Copyright (c) 2018 Confetti Interactive Inc.
#
# This file is part of The-Forge
# (see https://github.com/ConfettiFX/The-Forge).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import os.path
import shutil #Used for deleting files within subdirectories
import fnmatch #Checks for matching expression in name
import time #used for timing process in case one hangs without crashing
import platform #Used for determining running OS
import subprocess #Used for spawning processes
import sys #system module
import argparse #Used for argument parsing
import traceback
import signal #used for handling ctrl+c keyboard interrupt
import xml.etree.ElementTree as ET #used for parsing XML ubuntu project file
import shutil #used for deleting directories
from distutils.dir_util import copy_tree #used for copying directories
import re # Used for retrieving console IP
successfulBuilds = [] #holds all successful builds
failedBuilds = [] #holds all failed builds
successfulTests = [] #holds all successful tests
failedTests = [] #holds all failed tests
maxIdleTime = 45 #seconds before a spawned test process is considered hung and force-killed
def PrintResults():
    """Print a summary of the build and test results accumulated in the module-level lists."""
    if successfulBuilds:
        print("Successful Builds list:")
        for entry in successfulBuilds:
            print(entry['name'], entry['conf'], entry['platform'])
        print("")
    if failedBuilds:
        print("Failed Builds list:")
        for entry in failedBuilds:
            print(entry['name'], entry['conf'], entry['platform'])
    if successfulTests:
        print("Successful tests list:")
        for entry in successfulTests:
            if entry['gpu'] == "":
                print(entry['name'])
            else:
                print(entry['name'], entry['gpu'])
        print("")
    if failedTests:
        print("Failed Tests list:")
        for entry in failedTests:
            if entry['gpu'] == "":
                print(entry['name'], entry['reason'])
            else:
                print(entry['name'], entry['gpu'], entry['reason'])
def FindMSBuild17():
    """Locate MSBuild 15.x (Visual Studio 2017) via vswhere.

    Returns the full path to MSBuild.exe, or "" when vswhere fails or is
    missing. Windows-only: relies on the fixed vswhere install location.
    """
    ls_output = ""
    msbuildPath = ""
    try:
        #proc = subprocess.Popen(["C:/Program Files (x86)/Microsoft Visual Studio/Installer/vswhere.exe", "-latest", "-products", "*", "-requires" ,"Microsoft.Component.MSBuild", "-find", "MSBuild\**\Bin\MSBuild.exe"], stdout=subprocess.PIPE,stderr = subprocess.STDOUT, encoding='utf8')
        #open vswhere and parse the output
        proc = subprocess.Popen(["C:/Program Files (x86)/Microsoft Visual Studio/Installer/vswhere.exe", "-version", "[15.0,16.0)", "-requires" ,"Microsoft.Component.MSBuild", "-property", "installationPath"], stdout=subprocess.PIPE,stderr = subprocess.STDOUT, encoding='utf8')
        ls_output = proc.communicate()[0]
        # In case there is more than 1 Visual studio (community, professional, ...) installed on the machine use the first one
        ls_output = ls_output.split('\n')[0]
        #check if vswhere opened correctly
        if proc.returncode != 0:
            print("Could not find vswhere")
        else:
            # vswhere prints only the VS install root; MSBuild lives at a
            # fixed relative path inside it for VS2017.
            msbuildPath = ls_output.strip() + "/MSBuild/15.0/Bin/MSBuild.exe"
    except Exception as ex:
        #ERROR
        print(ex)
        print(ls_output)
        print("Could not find vswhere")
    return msbuildPath
#define string is a list of #defines separated by \n.
#Example: #define ACTIVE_TESTING_GPU 1\n#define AUTOMATED_TESTING 1\n
def AddPreprocessorToFile(filePath, defineString, stringToReplace):
    """Insert defineString into filePath next to the first line starting with stringToReplace.

    When stringToReplace contains "#pragma" the defines are appended directly
    after the matching line; otherwise they are inserted on their own line
    before it. No-op when the file does not exist or no line matches.
    """
    if not os.path.exists(filePath):
        return
    with open(filePath, 'r+') as f:
        lines = f.readlines()
        for i, line in enumerate(lines):
            if line.startswith(stringToReplace):
                if "#pragma" in stringToReplace:
                    lines[i] = line + defineString
                else:
                    lines[i] = defineString + "\n" + line
                break
        f.seek(0)
        f.writelines(lines)
        # BUG FIX: truncate after rewriting, matching RemovePreprocessorFromFile,
        # so no stale trailing bytes can ever be left behind.
        f.truncate()
def RemovePreprocessorFromFile(filePath, definesList):
    """Rewrite filePath in place, dropping every line containing any entry of definesList."""
    if not os.path.exists(filePath):
        return
    with open(filePath, 'r+') as f:
        original = f.readlines()
        kept = [ln for ln in original if not any(define in ln for define in definesList)]
        f.seek(0)
        f.writelines(kept)
        f.truncate()
def AddTestingPreProcessor(enabledGpuSelection):
    # Injects AUTOMATED_TESTING (and optionally ACTIVE_TESTING_GPU) into the
    # engine headers so the samples run in self-test mode.
    # NOTE(review): setDefines / setMemTracker are module globals, presumably
    # set from the parsed command-line arguments elsewhere in this script — confirm.
    if setDefines == True:
        print("Adding Automated testing preprocessor defines")
        macro = "#define AUTOMATED_TESTING 1"
        if enabledGpuSelection:
            macro += "\n#define ACTIVE_TESTING_GPU 1"
        AddPreprocessorToFile("Common_3/OS/Interfaces/IOperatingSystem.h", macro + "\n", "#pragma")
    if setMemTracker == True:
        print("Adding Memory tracking preprocessor defines")
        macro = "#define USE_MEMORY_TRACKING 1"
        AddPreprocessorToFile("Common_3/OS/Interfaces/IMemory.h", macro, "#if")
        AddPreprocessorToFile("Common_3/OS/MemoryTracking/MemoryTracking.cpp", macro, "#if")
def RemoveTestingPreProcessor():
    # Undo AddTestingPreProcessor: strip the testing / memory-tracking defines
    # back out of the engine sources after the run.
    testingDefines = ["#define AUTOMATED_TESTING", "#define ACTIVE_TESTING_GPU"]
    memTrackingDefines = ["#define USE_MEMORY_TRACKING"]
    if setDefines == True:
        print("Removing automated testing preprocessor defines")
        RemovePreprocessorFromFile("Common_3/OS/Interfaces/IOperatingSystem.h", testingDefines)
    if setMemTracker == True:
        print("Removing memory tracking preprocessor defines")
        RemovePreprocessorFromFile("Common_3/OS/Interfaces/IMemory.h", memTrackingDefines)
        RemovePreprocessorFromFile("Common_3/OS/MemoryTracking/MemoryTracking.cpp", memTrackingDefines)
def ExecuteTimedCommand(cmdList,outStream=subprocess.PIPE):
    """Run cmdList under a watchdog; return 0 on success, the exit code or -1 on failure.

    NOTE(review): outStream is unused — subprocess.run captures output itself.
    """
    try:
        if isinstance(cmdList, list):
            print("Executing Timed command: " + ' '.join(cmdList))
        else:
            print("Executing Timed command: " + cmdList)

        # Force-kill the process if it runs longer than maxIdleTime seconds
        # (the previous "10 minutes timeout" comment was out of date).
        proc = subprocess.run(cmdList, capture_output=True, timeout=maxIdleTime)
        if proc.returncode != 0:
            return proc.returncode
    except subprocess.TimeoutExpired as timeout:
        print(timeout)
        print("App hanged and was forcibly closed.")
        return -1
    except Exception as ex:
        print("-------------------------------------")
        if isinstance(cmdList, list):
            print("Failed Executing Timed command: " + ' '.join(cmdList))
        else:
            print("Failed Executing Timed command: " + cmdList)
        print(ex)
        print("-------------------------------------")
        return -1 #error return code

    print("Success")
    return 0 #success error code
def ExecuteCommandWOutput(cmdList, printException = True):
    """Run cmdList and return its captured stdout as a list of bytes lines.

    Returns "" (an empty string, not a list) when the command fails.
    """
    try:
        print("")
        print("Executing command: " + ' '.join(cmdList))
        print("")
        return subprocess.check_output(cmdList).splitlines()
    except Exception as ex:
        if printException == True:
            print("-------------------------------------")
            print("Failed executing command: " + ' '.join(cmdList))
            print(ex)
            print("-------------------------------------")
        return ""
def ExecuteCommand(cmdList,outStream):
    """Run cmdList with stdout directed to outStream.

    Returns the process exit code, or -1 when the process could not be spawned.
    """
    try:
        print("")
        label = ' '.join(cmdList) if isinstance(cmdList, list) else cmdList
        print("Executing command: " + label)
        print("")
        proc = subprocess.Popen(cmdList, stdout=outStream)
        proc.wait()
        if proc.returncode != 0:
            return proc.returncode
    except Exception as ex:
        print("-------------------------------------")
        label = ' '.join(cmdList) if isinstance(cmdList, list) else cmdList
        print("Failed Executing command: " + label)
        print(ex)
        print("-------------------------------------")
        return -1 #error return code
    return 0 #success error code
def ExecuteCommandErrorOnly(cmdList):
    """Run cmdList discarding all of its output.

    Returns the process exit code, or -1 when the process could not be spawned.
    """
    try:
        print("")
        print("Executing command: " + ' '.join(cmdList))
        print("")
        # BUG FIX: use subprocess.DEVNULL instead of a manually opened
        # os.devnull handle that was never closed (file-descriptor leak).
        proc = subprocess.Popen(cmdList, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
        proc.wait()
        if proc.returncode != 0:
            return proc.returncode
    except Exception as ex:
        print("-------------------------------------")
        print("Failed executing command: " + ' '.join(cmdList))
        print(ex)
        print("-------------------------------------")
        return -1 #error return code
    return 0 #success error code
def ExecuteBuild(cmdList, fileName, configuration, platform):
    """Run a build command and record the outcome in the global build-result lists."""
    returnCode = ExecuteCommand(cmdList, sys.stdout)
    record = {'name': fileName, 'conf': configuration, 'platform': platform}
    if returnCode == 0:
        successfulBuilds.append(record)
    else:
        print("FAILED BUILDING ", fileName, configuration)
        failedBuilds.append(record)
    return returnCode
def ExecuteTest(cmdList, fileName, regularCommand, gpuLine = ""):
if regularCommand:
returnCode = ExecuteCommand(cmdList, None)
else:
returnCode = ExecuteTimedCommand(cmdList,None)
if returnCode != 0:
print("FAILED TESTING ", fileName)
print("Return code: ", returnCode)
failedTests.append({'name':fileName, 'gpu':gpuLine, 'reason':"Runtime Failure"})
else:
successfulTests.append({'name':fileName, 'gpu':gpuLine})
return returnCode
def GetBundleIDFromIOSApp(filename):
    """Read CFBundleIdentifier out of an .app bundle's Info.plist.

    Returns "" when the plist is missing or cannot be parsed. macOS-only:
    uses the `plutil` tool to convert the (possibly binary) plist to JSON.
    """
    try:
        # plutil emits JSON on stdout; parse it locally.
        import json
        filename = filename + "/Info.plist"
        if not os.path.exists(filename):
            return ""
        fileContents = ExecuteCommandWOutput(["plutil", "-convert", "json", "-o", "-", "--",filename])
        # ExecuteCommandWOutput returns a list of bytes lines; re-join and decode.
        fileContents = (b"".join(fileContents)).decode('utf-8')
        if fileContents == "":
            return fileContents
        plistJson = json.loads(fileContents)
        return plistJson["CFBundleIdentifier"]
    except Exception as ex:
        print("Failed retrieving plist file")
        print(ex)
        return ""
#Get list of folders in given root with the given name
#xan specific depth to look only under a limited amount of child dirs
#default depth value is -1 --> no limit on depth
def FindFolderPathByName(rootToSearch, name, depth = -1):
    """Walk rootToSearch collecting directories whose name matches the fnmatch pattern.

    depth bounds how many os.walk iterations run before stopping (-1 = no
    limit). Each result ends with the platform path separator.
    """
    matches = []
    for root, dirs, files in os.walk(rootToSearch):
        for dirName in fnmatch.filter(dirs, name):
            matches.append(os.path.join(root, dirName) + os.path.sep)
        if depth == 0:
            break
        depth -= 1
    return matches
def GetFilesPathByExtension(rootToSearch, extension, wantDirectory, maxDepth=-1):
    """Collect paths under rootToSearch carrying the given extension.

    When wantDirectory is True, directories whose own name ends in the
    extension are returned (macOS .xcodeproj / .app bundles are directories);
    otherwise matching regular files are returned. maxDepth bounds the number
    of os.walk iterations (-1 = no limit).
    """
    matches = []
    for root, dirs, files in os.walk(rootToSearch):
        if wantDirectory:
            # Bundles act as directories, so test the directory name itself.
            lastComponent = root.split(os.sep)[-1]
            if lastComponent and lastComponent.split(os.extsep)[-1] == extension:
                matches.append(root)
        else:
            matches.extend(os.path.join(root, fileName)
                           for fileName in fnmatch.filter(files, "*." + extension))
        if maxDepth == 0:
            break
        maxDepth -= 1
    return matches
def GetMemLeakFile(exeFilePath):
    """Return the .memleaks file path corresponding to an executable path."""
    print (exeFilePath)
    # BUG FIX: split('.')[0] truncated at the FIRST dot anywhere in the path
    # (e.g. "./bin/app.exe" -> "" -> ".memleaks"); splitext strips only the
    # final extension component.
    exeFileWithoutExt = os.path.splitext(exeFilePath)[0]
    memLeakFile = exeFileWithoutExt + ".memleaks"
    return memLeakFile
"""
projRootFolder should be one of those:
-Unit_Tests
-Aura
-VisibilityBuffer
This function will mark the first available gpu config as used (this should be called after a run)
It returns false if there are no gpu's left to test, true otherwise
If No GPu's are left then it will recover the file
"""
activeGpusConfiguration = """#
#<vendor_id>, <model_id>, <sli_mode>
0x10de; 0x1b81; false; Nvidia Geforce GTX 1070;
0x10de; 0x1402; false; Nvidia Geforce GTX 950;
0x1002; 0x687f; false; AMD Vega;
0x1002; 0x67df; false; AMD Radeon RX 480;
"""
originalActiveGpuConfigLines = []
def selectActiveGpuConfig(forgeDir, projRootFolder, projectName, runIndex):
    """Advance the project's activeTestingGpu.cfg to the next GPU to test.

    On runIndex 0 the file is created from activeGpusConfiguration if missing,
    its lines are snapshotted, and the first uncommented line is returned as
    the active GPU. On later runs the currently active line is commented out
    and the next uncommented line becomes active. Once no active lines remain,
    the snapshot is restored and the file deleted.

    Returns {'running': bool, 'lineMatch': str}; running is False when every
    GPU has been consumed.

    NOTE(review): the runIndex-0 snapshot loop stops at the first active line,
    so originalActiveGpuConfigLines only holds the leading lines — confirm the
    restore path is intentional.
    """
    global activeGpusConfiguration
    global originalActiveGpuConfigLines
    #remove file extension from project name
    projectName = os.path.splitext(projectName)[0]
    # Aura / Visibility Buffer use a single shared GPUCfg folder.
    if "Aura" in projectName or "Visibility" in projectName:
        filename = "/Examples_3/"+projRootFolder+"/src/GPUCfg/activeTestingGpu.cfg"
    else:
        filename = "/Examples_3/"+projRootFolder+"/src/"+projectName+"/GPUCfg/activeTestingGpu.cfg"
    filename = forgeDir + filename
    #create active gpu config if it doesn't exist
    #this is only valid for our internal testing rig
    if not os.path.exists(filename):
        with open(filename, 'w+') as f:
            f.write(activeGpusConfiguration)
    removedMatch = False
    foundMatch = False
    lineMatch = ""
    with open(filename,'r+') as f:
        lines = f.readlines()
        if runIndex == 0:
            originalActiveGpuConfigLines = []
            for i,line in enumerate(lines):
                originalActiveGpuConfigLines.append(line)
                if not line.startswith('#'):
                    return {'running':True, 'lineMatch': line}
        for i, line in enumerate(lines):
            if not line.strip():
                continue
            # Comment out the first active line (the GPU that just ran)...
            if not line.startswith('#') and not removedMatch:
                lines[i]=line.replace(line,"# " +line)
                removedMatch = True
                continue
            # ...then the next active line becomes the GPU to test next.
            if removedMatch and not line.startswith('#'):
                print("Found line", line)
                lineMatch = line
                foundMatch = True
                break
        if foundMatch:
            f.seek(0)
            for line in lines:
                f.write(line)
        else:
            # No GPUs left: restore the snapshotted contents.
            f.seek(0)
            f.truncate()
            for line in originalActiveGpuConfigLines:
                f.write(line)
    #if we are done then we can remove the file
    if not foundMatch and os.path.exists(filename):
        try:
            os.remove(filename)
        except OSError as e: ## if failed, report it back to the user ##
            print(("Error: %s - %s." % (e.filename, e.strerror)))
    return {'running':foundMatch, 'lineMatch': lineMatch}
def TestXcodeProjects(iosTesting, macOSTesting, iosDeviceId):
    """Run the built iOS / macOS Release apps and record pass/fail plus memory leaks.

    iosDeviceId selects a specific device for ios-deploy; "" or "-1" means
    "first available device". Returns 0 when every requested test passed,
    -1 otherwise. Results also go into the global test-result lists.
    """
    errorOccured = False

    projects = GetFilesPathByExtension("./Examples_3/","app", True)
    iosApps = []
    osxApps = []
    appsToTest = []
    for proj in projects:
        if "Release" in proj:
            # Split the Release bundles by target platform.
            if "_iOS" in proj:
                iosApps.append(proj)
            else :
                osxApps.append(proj)

    if iosTesting:
        appsToTest = iosApps
    if macOSTesting:
        appsToTest.extend(osxApps)

    for app in appsToTest:
        leaksDetected = False
        #get working directory (excluding the app bundle in path)
        rootPath = os.sep.join(app.split(os.sep)[0:-1])
        filename = app.split(os.sep)[-1].split(os.extsep)[0]
        #save current work dir
        currDir = os.getcwd()
        #change dir to app location
        os.chdir(rootPath)
        command = []
        retCode = -1
        # get the memory leak file path
        memleakFile = GetMemLeakFile(filename)
        if "_iOS" in filename:
            #if specific ios id was passed then run for that device
            #otherwise run on first device available
            if iosDeviceId == "-1" or iosDeviceId == "":
                command = ["ios-deploy","--uninstall","-b",filename + ".app","-I"]
            else:
                command = ["ios-deploy","--uninstall","-b",filename + ".app","-I", "--id", iosDeviceId]
            # force Metal validation layer for iOS
            command.append("-s METAL_DEVICE_WRAPPER_TYPE=1")
            retCode = ExecuteTest(command, filename, True)
            if retCode == 0:
                bundleID = GetBundleIDFromIOSApp(filename + ".app")
                if bundleID != "":
                    command = ["ios-deploy","--bundle_id",bundleID,"--download=/Library/Application Support/"+memleakFile,"--to","./"]
                    # BUG FIX: the original condition used "or", which is always
                    # true, so "--id -1" / "--id <empty>" could be handed to
                    # ios-deploy. Only target a specific device when a real id
                    # was supplied.
                    if iosDeviceId != "-1" and iosDeviceId != "":
                        command.append("--id")
                        command.append(iosDeviceId)
                    memleakDownloaded = ExecuteCommand(command, sys.stdout)
                    if memleakDownloaded == 0:
                        print("Memleaks file downloaded for:" + bundleID)
                        leaksDetected = FindMemoryLeaks("Library/Application Support/"+ memleakFile)
                    else:
                        print("[Error] Memleaks file could not be downloaded for:" + bundleID)
                else:
                    print("[Error] Bundle ID NOT found:" + bundleID)
        else:
            # macOS: run the binary inside the bundle under the watchdog.
            command = ["./" + filename + ".app/Contents/MacOS/" + filename]
            retCode = ExecuteTest(command, filename, False)
            leaksDetected = FindMemoryLeaks(memleakFile)
        if retCode == 0 and leaksDetected == True:
            # Demote the recorded success to a leak failure.
            lastSuccess = successfulTests.pop()
            failedTests.append({'name':lastSuccess['name'], 'gpu':lastSuccess['gpu'], 'reason':"Memory Leaks"})
            errorOccured = True
        if retCode != 0:
            errorOccured = True
        #set working dir to initial
        os.chdir(currDir)

    if errorOccured:
        return -1
    else:
        return 0
def GetXcodeSchemes(targetPath, getMacOS, getIOS):
    """List the xcodebuild schemes (as bytes) of a workspace/project, filtered by platform.

    Returns [] on any xcodebuild error. When both platforms are requested and
    a BuildAll scheme exists, only BuildAll survives the filtering.
    """
    command = ["xcodebuild", "-list"]
    if ".xcworkspace" in targetPath:
        command.append("-workspace")
        command.append(targetPath)
    elif ".xcodeproj" in targetPath:
        command.append("-project")
        command.append(targetPath)

    schemesList = ExecuteCommandWOutput(command)
    parsedSchemes = []
    filteredSchemes = []
    # Lines before "Schemes:" are informational headers; a later blank line
    # or a line containing ":" marks the end of the scheme list.
    schemesStartFound = False
    # BUG FIX: this flag was reset inside the loop on every iteration, so it
    # only ever reflected the last processed line; initialize it once here.
    buildAllFound = False
    for line in schemesList:
        if b"error" in line:
            print("Error retrieving the schemes from: " + targetPath)
            print(line)
            return []
        if b"Schemes:" in line:
            schemesStartFound = True
            continue
        if schemesStartFound:
            line = line.strip()
            if line.isspace() or not line:
                break
            if b":" in line:
                break
            if b"BuildAll" in line:
                buildAllFound = True
            #add scheme
            parsedSchemes.append(line)

    buildBothOS = getMacOS and getIOS
    for scheme in parsedSchemes:
        #current scheme is build all but we not building both platforms
        #filter it out
        if b"BuildAll" in scheme and not buildBothOS:
            continue
        #building both platforms and we found a build all scheme
        #filter all the other schemes out
        if not b"BuildAll" in scheme and buildBothOS and buildAllFound:
            continue
        # BUG FIX: schemes are bytes; comparing against the str "iOS" raised
        # TypeError on Python 3 — use a bytes literal.
        #filter macos scheme if necessary
        if not getMacOS and b"iOS" not in scheme:
            continue
        #filter ios scheme if necessary
        if not getIOS and b"iOS" in scheme:
            continue
        filteredSchemes.append(scheme)
    return filteredSchemes
#Helper to create xcodebuild command for given scheme, workspace(full path from current working directory) and configuration(Debug, Release)
#can filter out schemes based on what to skip and will return "" in those cases
def CreateXcodeBuildCommand(skipMacos, skipIos, skipIosCodeSigning,path,scheme,configuration, isWorkspace, ddPath, printBuildOutput):
    """Assemble an xcodebuild invocation for the given workspace/project.

    Returns the argument list, or "" when the combination is not buildable
    (workspace without a scheme).
    """
    logLevel = "-hideShellScriptEnvironment" if printBuildOutput else "-quiet"

    if isWorkspace and "BuildAll" in scheme:
        # The special BuildAll scheme builds every project of the workspace,
        # enabling more parallelism.
        command = ["xcodebuild",logLevel,"-workspace",path,"-configuration",configuration,"build","-scheme","BuildAll", "-parallelizeTargets"]
    elif isWorkspace and scheme != "":
        command = ["xcodebuild",logLevel,"-workspace",path,"-configuration",configuration,"build","-parallelizeTargets", "-scheme",scheme]
    elif not isWorkspace:
        if scheme != "" and (skipMacos or skipIos):
            # Platform filtering only works through a named scheme.
            command = ["xcodebuild",logLevel,"-project",path,"-configuration",configuration,"build", "-parallelizeTargets", "-scheme", scheme]
        else:
            # Otherwise build all targets of the project in parallel.
            command = ["xcodebuild",logLevel,"-project",path,"-configuration",configuration,"build","-scheme", scheme, "-parallelizeTargets"]
    else:
        return ""

    # Custom derived-data location only when explicitly requested.
    if ddPath != 'Null':
        command.append("-derivedDataPath")
        command.append(ddPath)

    if skipIosCodeSigning:
        command.extend([
        "CODE_SIGN_IDENTITY=\"\"",
        "CODE_SIGNING_REQUIRED=\"NO\"",
        "CODE_SIGN_ENTITLEMENTS=\"\"",
        "CODE_SIGNING_ALLOWED=\"NO\""])
    return command
def ListDirs(path):
    """Return the names of the immediate subdirectories of path."""
    return [entry for entry in os.listdir(path)
            if os.path.isdir(os.path.join(path, entry))]
def BuildXcodeProjects(skipMacos, skipIos, skipIosCodeSigning, skipDebugBuild, skipReleaseBuild, printXcodeBuild, derivedDataPath):
    """Build the hard-coded list of Xcode projects/workspaces for macOS/iOS.

    Returns 0 when every configuration built, -1 otherwise; outcomes are also
    recorded in the global build-result lists by ExecuteBuild.
    """
    errorOccured = False
    buildConfigurations = ["Debug", "Release"]
    if skipDebugBuild:
        buildConfigurations.remove("Debug")
    if skipReleaseBuild:
        buildConfigurations.remove("Release")

    #since our projects for macos are all under a macos Xcode folder we can search for
    #that specific folder name to gather source folders containing project/workspace for xcode
    #macSourceFolders = FindFolderPathByName("Examples_3/","macOS Xcode", -1)
    xcodeProjects = [ "/Examples_3/Ephemeris/macOS Xcode/Ephemeris/Ephemeris.xcodeproj",
    "/Examples_3/Visibility_Buffer/macOS Xcode/Visibility_Buffer.xcodeproj",
    "/Examples_3/Unit_Tests/macOS Xcode/Unit_Tests.xcworkspace"]

    #if derivedDataPath is not specified then use the default location
    if derivedDataPath == 'Null':
        DDpath = derivedDataPath
    else:
        #Custom Derived Data location relative to root of project
        DDpath = os.path.join(os.getcwd(), derivedDataPath)
        if os.path.exists(DDpath):
            #delete the contents of subdirectories at the location
            shutil.rmtree(DDpath)
        #Create a custom directory
        os.mkdir(DDpath)

    for proj in xcodeProjects:
        #get working directory (excluding the xcodeproj in path)
        rootPath = os.getcwd() + os.sep.join(proj.split(os.sep)[0:-1])
        #save current work dir
        currDir = os.getcwd()
        #change dir to xcworkspace location
        os.chdir(rootPath)
        #create command for xcodebuild
        filenameWExt = proj.split(os.sep)[-1]
        filename = filenameWExt.split(os.extsep)[0]
        extension = filenameWExt.split(os.extsep)[1]
        #get and filter xcode schemes
        schemesList = GetXcodeSchemes(filenameWExt, not skipMacos, not skipIos)
        #if building both iOS and macOS then build them in parallel
        #by building whole project instead of schemes
        if "xcodeproj" in extension and not (skipMacos or skipIos):
            #no need for any schemes we will build whole project
            schemesList = [filename]
        else:
            # NOTE(review): GetXcodeSchemes yields bytes while the fallback
            # lists here hold str; the b"BuildAll" test below and the str
            # comparisons in CreateXcodeBuildCommand mix the two types —
            # confirm which paths are actually exercised.
            for scheme in schemesList:
                if b"BuildAll" in scheme:
                    #remove all other schemes
                    schemesList = ["BuildAll"]
                    break
        for conf in buildConfigurations:
            #will build all targets for vien project
            #canot remove ios / macos for now
            for scheme in schemesList:
                command = CreateXcodeBuildCommand(skipMacos, skipIos, skipIosCodeSigning, filenameWExt,scheme,conf, "xcworkspace" in extension, DDpath, printXcodeBuild)
                # Label the platform for the result lists.
                platformName = "macOS/iOS"
                if "iOS" in scheme:
                    platformName = "iOS"
                elif "BuildAll" not in scheme:
                    platformName = "macOS"
                #just switch otu filename and scheme in case we are building BuildAll
                #display the project name intead.
                if "BuildAll" in scheme:
                    sucess = ExecuteBuild(command, filename, conf, platformName)
                else:
                    sucess = ExecuteBuild(command, filename + "/" + scheme, conf, platformName)
                if sucess != 0:
                    errorOccured = True
        os.chdir(currDir)

    if errorOccured == True:
        return -1
    return 0
#this needs the vulkan environment variables set up correctly
#if they are not in ~/.profile then they need to be set up for every subprocess
#If it is in ~/.profile then it needs to be maintaned by updating the version number in ~/.profile.
def BuildLinuxProjects():
    """Build every CodeLite workspace under Examples_3 in Debug and Release.

    Generates makefiles via codelite-make per project, then runs make.
    Returns 0 on full success, -1 if any step failed. Requires the Vulkan
    environment variables to be set for the spawned subprocesses.
    """
    errorOccured = False

    projsToBuild = GetFilesPathByExtension("./Examples_3/","workspace", False)
    for projectPath in projsToBuild:
        #get working directory (excluding the workspace in path)
        rootPath = os.sep.join(projectPath.split(os.sep)[0:-1])
        #save current work dir
        currDir = os.getcwd()
        #change dir to workspace location
        os.chdir(rootPath)
        configurations = ["Debug", "Release"]
        for conf in configurations:
            #create command for xcodebuild
            #filename = projectPath.split(os.sep)[-1].split(os.extsep)[0]
            filename = projectPath.split(os.sep)[-1]
            #need to parse xml configuration to get every project
            xmlTree = ET.parse("./"+filename)
            xmlRoot = xmlTree.getroot()

            ubuntuProjects = []
            for child in xmlRoot:
                if child.tag == "Project":
                    # Library/support projects are excluded; only the sample
                    # executables get built directly.
                    if child.attrib["Name"] != "OSBase" and child.attrib["Name"] != "EASTL" and child.attrib["Name"] != "OS" and child.attrib["Name"] != "Renderer" and child.attrib["Name"] != "SpirVTools" and child.attrib["Name"] != "PaniniProjection" and child.attrib["Name"] != "gainput" and child.attrib["Name"] != "ozz_base" and child.attrib["Name"] != "ozz_animation" and child.attrib["Name"] != "Assimp" and child.attrib["Name"] != "zlib" and child.attrib["Name"] != "LuaManager" and child.attrib["Name"] != "AssetPipeline" and child.attrib["Name"] != "AssetPipelineCmd" and child.attrib["Name"] != "ozz_animation_offline":
                        ubuntuProjects.append(child.attrib["Name"])

            for proj in ubuntuProjects:
                # codelite-make only generates the makefiles...
                command = ["codelite-make","-w",filename,"-p", proj,"-c",conf]
                #sucess = ExecuteBuild(command, filename+"/"+proj,conf, "Ubuntu")
                sucess = ExecuteCommand(command, sys.stdout)

                if sucess != 0:
                    errorOccured = True

                # ...then make performs the actual build and records the result.
                command = ["make", "-s"]
                sucess = ExecuteBuild(command, filename+"/"+proj,conf, "Ubuntu")

                if sucess != 0:
                    errorOccured = True

        #set working dir to initial
        os.chdir(currDir)

    if errorOccured == True:
        return -1
    return 0
#this needs the vulkan environment variables set up correctly
#if they are not in ~/.profile then they need to be set up for every subprocess
#If it is in ~/.profile then it needs to be maintaned by updating the version number in ~/.profile.
def TestLinuxProjects():
    """Run every built Release sample from the CodeLite workspaces and check for leaks.

    Each executable runs under the watchdog (ExecuteTimedCommand via
    ExecuteTest); a successful run is demoted to a failure if its .memleaks
    report shows leaks. Returns 0 on full success, -1 otherwise.
    """
    errorOccured = False

    projsToTest = GetFilesPathByExtension("./Examples_3/","workspace", False)
    for projectPath in projsToTest:
        #get working directory (excluding the workspace in path)
        rootPath = os.sep.join(projectPath.split(os.sep)[0:-1])
        #save current work dir
        currDir = os.getcwd()
        #change dir to workspace location
        os.chdir(rootPath)
        configurations = ["Release"]
        for conf in configurations:
            filename = projectPath.split(os.sep)[-1].split(os.extsep)[0]
            #filename = projectPath.split(os.sep)[-1]
            #need to parse xml configuration to get every project
            xmlTree = ET.parse("./"+filename + ".workspace")
            xmlRoot = xmlTree.getroot()

            ubuntuProjects = []
            for child in xmlRoot:
                if child.tag == "Project":
                    # Library/support projects are excluded; only sample
                    # executables are run.
                    if child.attrib["Name"] != "OSBase" and child.attrib["Name"] != "EASTL" and child.attrib["Name"] != "OS" and child.attrib["Name"] != "Renderer" and child.attrib["Name"] != "SpirVTools" and child.attrib["Name"] != "PaniniProjection" and child.attrib["Name"] != "gainput" and child.attrib["Name"] != "ozz_base" and child.attrib["Name"] != "ozz_animation" and child.attrib["Name"] != "Assimp" and child.attrib["Name"] != "zlib" and child.attrib["Name"] != "LuaManager" and child.attrib["Name"] != "AssetPipeline" and child.attrib["Name"] != "AssetPipelineCmd" and child.attrib["Name"] != "MeshOptimizer" and child.attrib["Name"] != "ozz_animation_offline":
                        ubuntuProjects.append(child.attrib["Name"])

            for proj in ubuntuProjects:
                leaksDetected = False
                exePath = os.path.join(os.getcwd(),proj,conf,proj)
                command = [exePath]
                retCode = ExecuteTest(command, proj ,False)

                if retCode != 0:
                    errorOccured = True

                memleaksFilename = os.path.join(os.getcwd(),proj,conf,GetMemLeakFile(proj))
                leaksDetected = FindMemoryLeaks(memleaksFilename)
                if retCode == 0 and leaksDetected == True:
                    # Demote the recorded success to a leak failure.
                    lastSuccess = successfulTests.pop()
                    failedTests.append({'name':lastSuccess['name'], 'gpu':lastSuccess['gpu'], 'reason':"Memory Leaks"})
                    errorOccured = True

        #set working dir to initial
        os.chdir(currDir)

    if errorOccured == True:
        return -1
    return 0
def TestWindowsProjects(useActiveGpuConfig):
    """Run the Release PC unit-test executables (Dx12/Vulkan/Dx11) plus the
    HLSLParser self-test, recording failures and memory leaks.

    useActiveGpuConfig -- when True, re-run each test once per GPU entry
    returned by selectActiveGpuConfig() (driven by activeTestingGpu.cfg).

    Returns 0 when everything passed, -1 otherwise.
    """
    errorOccured = False
    isWindows7 = int(platform.release()) < 10
    if not isWindows7:
        # HLSLParser regression test: run its compile.bat and scan for 'error'.
        try:
            bat_dir = os.path.join(os.getcwd(), 'Common_3\\ThirdParty\\OpenSource\\hlslparser\\Test')
            bat_path = os.path.join(bat_dir, 'compile.bat')
            testout = subprocess.Popen([bat_path], cwd=bat_dir, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, encoding='utf-8').communicate()[0]
            if re.search(r'\berror\b', testout, re.M|re.I):
                print("HLSLParser test failed: there are errors in output")
                return -1
        except Exception as ex:
            return -1
    projects = GetFilesPathByExtension("./Examples_3","exe",False)
    fileList = []
    for proj in projects:
        #we don't want to build Xbox one solutions when building PC
        #we don't want to run ImageConvertTools when building PC
        #we don't want to run AssetPipelineCmd when building PC
        if "PC Visual Studio 2017" in proj and "Release" in proj and not "ImageConvertTools" in proj and not "AssetPipelineCmd" in proj :
            fileList.append(proj)
    if not isWindows7:
        fileList.append('.\\Common_3\\ThirdParty\\OpenSource\\hlslparser\\Parser\\x64_ReleaseTest\\Parser.exe')
    for proj in fileList:
        leaksDetected = False
        #get current path for sln file
        #strip the . from ./ in the path
        #replace / by the os separator in case we need // or \\
        rootPath = os.getcwd() + proj.strip('.')
        rootPath = rootPath.replace("/",os.sep)
        #need to get root folder of path by stripping the filename from path
        rootPath = rootPath.split(os.sep)[0:-1]
        rootPath = os.sep.join(rootPath)
        #save root directory where python is executed from
        currDir = os.getcwd()
        #change working directory to the executable's folder
        os.chdir(rootPath)
        filename = proj.split(os.sep)[-1]
        origFilename = filename
        command = [filename]
        # prefix the reported name with the renderer backend
        if "ReleaseVk" in proj:
            filename = "VK_" + filename
        elif "Dx11" in proj:
            filename = "Dx11_" + filename
        elif "hlslparser" not in proj:
            filename = "Dx12_" + filename
        parentFolder = proj.split(os.sep)[1]
        # get the memory leak file path
        memleakFile = GetMemLeakFile(origFilename)
        # delete the memory leak file before executing the app
        if os.path.exists(memleakFile):
            os.remove(memleakFile)
        # BUGFIX: the original condition was
        #   `if useActiveGpuConfig == True not in parentFolder:`
        # -- a chained comparison that raised TypeError whenever
        # useActiveGpuConfig was True. Assumed intent: use per-GPU runs for
        # everything except the hlslparser executable -- TODO confirm.
        if useActiveGpuConfig == True and "hlslparser" not in proj:
            currentGpuRun = 0
            resultGpu = selectActiveGpuConfig(currDir, parentFolder,origFilename,currentGpuRun)
            while resultGpu['running'] == True:
                retCode = ExecuteTest(command, filename, False, resultGpu['lineMatch'])
                currentGpuRun += 1
                resultGpu = selectActiveGpuConfig(currDir, parentFolder,origFilename,currentGpuRun)
        else:
            retCode = ExecuteTest(command, filename,False)
        leaksDetected = FindMemoryLeaks(memleakFile)
        if retCode == 0 and leaksDetected == True:
            # run succeeded but leaked: reclassify it as a failure
            lastSuccess = successfulTests.pop()
            failedTests.append({'name':lastSuccess['name'], 'gpu':lastSuccess['gpu'], 'reason':"Memory Leaks"})
            errorOccured = True
        if retCode != 0:
            errorOccured = True
        os.chdir(currDir)
    if errorOccured == True:
        return -1
    return 0
def TestXboxProjects():
    """Deploy and run every XBox One unit test on the devkit.

    Requires the DURANGOXDK environment variable (XDK install path).
    Failures are detected via the app's query state, new crash dumps
    appearing in the console's LocalDumps share, and .memleaks files.
    Returns 0 on success, -1 if any app failed or leaked, 1 if the console
    is unreachable.
    """
    errorOccured = False
    FNULL = open(os.devnull, 'w')
    #Get console IP by parsing 'xbconnect /QG' output
    consoleIP = ""
    xdkDir = os.environ['DURANGOXDK'] + 'bin/'
    command = [xdkDir+'xbconnect', '/QG']
    output = subprocess.Popen(command, stdin=None, stdout=subprocess.PIPE, stderr=FNULL)
    output = output.communicate()[0]
    connection = re.search(b'Connections at (.+?), ', output)
    if connection:
        consoleIP = connection.group(1)
        consoleIP = consoleIP.decode('utf-8')
    for output_line in output.split(b'\n'):
        if b"TITLE: Unreachable." in output_line:
            print ("Unable to connect to: "+consoleIP)
            return 1
    # UNC path of the console's crash dump share
    crashdump_path = '\\\\'+consoleIP+"\TitleScratch\LocalDumps"
    #Clean all apps
    print ("Cleaning XBox apps and data (this will reboot the console)")
    command = [xdkDir+'xbcleanup', '/U /D /P /C /L']
    output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
    print ("Done cleaning...")
    #Set console setting to generate crash dumps
    command = [xdkDir+'xbconfig','CrashDumpType=mini',"/X"+consoleIP]
    output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
    try:
        #Get count of existing crash dumps (baseline to detect new ones later)
        command = ["cmd", "/c", "dir", crashdump_path]
        output = subprocess.check_output(command)
        output = output.split(b'.exe')
        crashDumpCount = len(output) - 1
    except Exception as ex:
        print(ex)
        crashDumpCount = 0
    #get paths for exe in Loose folder
    projects = GetFilesPathByExtension("Xbox/Examples_3","exe",False)
    fileList = []
    for proj in projects:
        if "XBOXOne Visual Studio 2017" in proj:# and "Release" in proj:
            if "Loose" in proj:
                fileList.append(os.path.dirname(proj))
    #Deploy Loose folders and store app names
    appList = []
    for filename in fileList:
        #deploy app to xbox
        command = [xdkDir+'xbapp',"deploy",filename,"/X"+consoleIP]
        print ("Deploying: " + filename)
        output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
        output = output.decode('utf-8')
        #Extract App Name from the deploy output
        appName = "InvalidAppName"
        for item in output.split("\n"):
            if "App" in item:
                appName = item.strip()
                appList.append(appName)
                print ("Successfully deployed: " + appName)
        if appName == "InvalidAppName":
            print ("Failed to deploy: " + filename)
            failedTests.append({'name':filename, 'gpu':"", 'reason':"Invalid app name"})
            errorOccured = True
        print ("")
    #Launch the deployed apps
    for appName in appList:
        command = [xdkDir+'xbapp',"launch","/X"+consoleIP, appName]
        print ("Executing command: " + ' '.join(command))
        output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
        #Make sure the app launched: 'xbapp query' output contains a nonzero
        #numeric token while the app is running
        isRunning = int(0)
        command = [xdkDir+'xbapp',"query","/X"+consoleIP, appName]
        output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
        for s in output.split():
            if s.isdigit():
                isRunning = int(s)
        print ("The operation completed successfully")
        if isRunning == 0:
            errorOccured = True
            print ("The operation failed")
            failedTests.append({'name':appName, 'gpu':"", 'reason':"Failed to launch app"})
            continue
        #Check if app terminates or times out
        timeout = time.time() + float(maxIdleTime)
        while isRunning != 0 and time.time() < timeout:
            output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
            for s in output.split():
                if s.isdigit():
                    isRunning = int(s)
        # Timeout Error: app still running past maxIdleTime -> kill it
        if isRunning != 0:
            errorOccured = True
            print ("Timeout: " + appName + "\n")
            command = [xdkDir+'xbapp',"terminate","/X"+consoleIP, appName]
            output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
            failedTests.append({'name':appName, 'gpu':"", 'reason':"Runtime failure"})
        else:
            testingComplete = True
            #Wait for crash dump folder to be discoverable and get count of crash dumps
            command = ["cmd", "/c", "dir", crashdump_path]
            rc = 1
            while rc != 0:
                rc = subprocess.call(command, stdin=None, stdout=FNULL, stderr=FNULL)
            output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
            output = output.decode('utf-8').split('.exe')
            #Check if a new crash dump was generated -> app crashed
            if (len(output) - 1 > crashDumpCount):
                crashDumpCount = len(output) - 1
                testingComplete = False
            # get the memory leak file path: derive the installed-package
            # folder name from the app identity ("Name_1.0.0.0_x64__hash")
            memleakPath = '\\\\'+consoleIP+'\\'
            appFileName = appName.split("!")[0]
            appNameParts = appFileName.split('_')
            memleakPath = memleakPath + appNameParts[0]+ "_1.0.0.0_x64__" + appNameParts[1]
            memleakPath = GetFilesPathByExtension(memleakPath,"exe",False)
            memleakPath = memleakPath[0].split('.exe')[0]+".memleaks"
            leaksDetected = FindMemoryLeaks(memleakPath)
            if testingComplete and leaksDetected == True:
                errorOccured = True
                failedTests.append({'name':appName, 'gpu':"", 'reason':"Memory Leaks"})
            elif testingComplete:
                print ("Successfully ran " + appName + "\n")
                successfulTests.append({'name': appName, 'gpu': ""})
            else:
                errorOccured = True
                print ("Application Terminated Early: " + appName + "\n")
                failedTests.append({'name': appName, 'gpu': "", 'reason': "Runtime failure"})
    #Copy crash dumps to PC and delete them from the console
    command = ["xcopy", crashdump_path, "C:\\dumptemp\\", "/s", "/e"]
    output = subprocess.check_output(command, None, stderr = subprocess.STDOUT)
    copy_tree("C:\\dumptemp", "C:\\XboxOneCrashDumps")
    shutil.rmtree("C:\\dumptemp")
    shutil.rmtree(crashdump_path)
    os.makedirs(crashdump_path)
    FNULL.close()
    if errorOccured == True:
        return -1
    return 0
def TestNintendoSwitchProjects():
    """Run every Release NX unit test on the devkit via RunOnTarget.exe.

    Requires NINTENDO_SDK_ROOT. Returns 0 on success, -1 if any run failed.
    """
    anyFailure = False
    toolsDir = os.environ['NINTENDO_SDK_ROOT'] + '/Tools/CommandLineTools/'
    controlTargetExe = os.path.join(toolsDir, "ControlTarget.exe")
    runOnTargetExe = os.path.join(toolsDir, "RunOnTarget.exe")
    # collect the packaged Release apps (.nspd directories)
    candidates = GetFilesPathByExtension("./Switch/Examples_3", "nspd", True)
    testApps = [p for p in candidates
                if "NX Visual Studio 2017" in p and "Release" in p]
    for appPath in testApps:
        baseName = appPath.split(os.path.sep)[-1]
        # RunOnTarget is invoked with a single command string; fail the run
        # on the usual crash markers or when it idles past maxIdleTime.
        launchCmd = (runOnTargetExe + ' "' + appPath + '"'
                     + ' --failure-timeout ' + str(maxIdleTime)
                     + ' --pattern-failure-exit "Assert|Break|Panic|Halt|Fatal|GpuCoreDumper"')
        reportName = ("Debug_" if "Debug" in appPath else "Release_") + baseName
        retCode = ExecuteTest(launchCmd, reportName, False)
        # always terminate whatever is still running on the target
        ExecuteCommand([controlTargetExe, "terminate"], None)
        if retCode != 0:
            # reset the devkit so the next test starts from a clean state
            ExecuteCommand([controlTargetExe, "reset"], None)
            anyFailure = True
    if anyFailure:
        return -1
    return 0
def TestOrbisProjects():
    """Run every Release PS4 unit-test elf through orbis-run.exe and record
    failures and memory leaks.

    Returns 0 when everything passed, -1 otherwise.
    """
    # NOTE: the original opened os.devnull into an unused, never-closed
    # handle and initialized errorOccured twice; both removed.
    errorOccured = False
    #get paths for the built elf files
    projects = GetFilesPathByExtension("PS4/Examples_3","elf",False)
    fileList = []
    workingDirList = []
    for proj in projects:
        if "PS4 Visual Studio 2017" in proj and "Release" in proj:
            fileList.append(proj)
            # unit tests run from a per-test folder; everything else runs
            # from the solution root (three levels up from the elf)
            if "Unit_Tests" in proj:
                workingDirList.append(os.path.dirname(os.path.dirname(os.path.dirname(proj))) + "/" + os.path.splitext(os.path.basename(proj))[0])
            else:
                workingDirList.append(os.path.dirname(os.path.dirname(os.path.dirname(proj))))
    for filename, workingDir in zip(fileList, workingDirList):
        memleakFile = workingDir + "/Resources/app.memleaks"
        # delete the memory leak file before executing the app
        if os.path.exists(memleakFile):
            os.remove(memleakFile)
        command = ["orbis-run.exe" ,"/debug" ,"/kill" , "/workingDirectory:" , workingDir, "/elf", filename ]
        retCode = ExecuteTest(command, os.path.splitext(os.path.basename(filename))[0], False)
        leaksDetected = FindMemoryLeaks(memleakFile)
        if retCode == 0 and leaksDetected == True:
            # run succeeded but leaked: reclassify it as a failure
            lastSuccess = successfulTests.pop()
            failedTests.append({'name':lastSuccess['name'], 'gpu':lastSuccess['gpu'], 'reason':"Memory Leaks"})
            errorOccured = True
        if retCode != 0:
            errorOccured = True
    if errorOccured:
        return -1
    return 0
def AndroidADBCheckRunningProcess(adbCommand, processName, packageName):
    """Poll the given `adb shell ps | grep` command until *processName* no
    longer appears in its output.

    adbCommand -- command list polled via ExecuteCommandWOutput.
    processName -- process name whose disappearance ends the wait.
    packageName -- unused here; kept for the caller's call signature.

    Returns True on every exit path: immediately when the process shows
    state 'D' (uninterruptible sleep -- assumed stuck, TODO confirm), or
    once the process is gone from the ps output.
    """
    output = processName
    waitingForExit = True
    while waitingForExit == True:
        output = ExecuteCommandWOutput(adbCommand, False)
        output = (b"".join(output)).decode('utf-8')
        print(output)
        # match the ps state letter (S=sleeping, D=uninterruptible) in front
        # of the com.forge.unittest process entry
        runningMatch = re.findall(r"(S|D)\s+com.forge.unittest", output, re.MULTILINE | re.IGNORECASE)
        print(runningMatch)
        if len(runningMatch) > 0:
            value = runningMatch[0]
            # bail out early if the process is stuck in state 'D'
            if value == "D":
                return True
        if processName not in output:
            waitingForExit = False
    return True
def TestAndroidProjects():
    """Install, launch and monitor every Release Android unit-test APK via adb.

    Success is determined by the "Success terminating application" marker
    appearing in the app's logcat output. Returns 0 when every app passed,
    -1 otherwise.
    """
    errorOccured = False
    projects = GetFilesPathByExtension("./Examples_3/Unit_Tests/Android_VisualStudio2017","apk",False)
    fileList = []
    for proj in projects:
        if "Android_VisualStudio2017" in proj and "Release" in proj and "Packaging" not in proj:
            fileList.append(proj)
    for proj in fileList:
        leaksDetected = False
        #get current path for sln file
        #strip the . from ./ in the path
        #replace / by the os separator in case we need // or \\
        rootPath = os.getcwd() + proj.strip('.')
        rootPath = rootPath.replace("/",os.sep)
        #need to get root folder of path by stripping the filename from path
        rootPath = rootPath.split(os.sep)[0:-1]
        rootPath = os.sep.join(rootPath)
        #save root directory where python is executed from
        currDir = os.getcwd()
        apkName = proj.split(os.sep)[-1]
        filenameNoExt = apkName.split('.')[0]
        fullAppName = "com.forge.unittest." + filenameNoExt
        #change working directory to the apk's folder
        os.chdir(rootPath)
        #origFilename = filename
        unlockScreenCommand = ["adb","shell", "input", "keyevent", "82"]
        uninstallCommand = ["adb", "uninstall",fullAppName]
        grepPSCommand = ["adb", "shell", "ps","| grep", fullAppName]
        installCommand = ["adb", "install", "-r", apkName]
        runCommand = ["adb", "shell", "am", "start", "-W", "-n", fullAppName + "/android.app.NativeActivity"]
        # NOTE(review): force-stop is passed the apk filename, but
        # `am force-stop` expects a package name (fullAppName) -- confirm.
        stopAppCommand = ["adb", "shell", "am", "force-stop" , apkName]
        logCatCommand = ["adb", "logcat","-d", "-s", "The-Forge", "the-forge-app"]
        clearLogCatCommand = ["adb", "logcat", "-c"]
        # get the memory leak file path
        # memleakFile = GetMemLeakFile(origFilename)
        # delete the memory leak file before execute the app
        # if os.path.exists(memleakFile):
        # os.remove(memleakFile)
        ExecuteCommand(unlockScreenCommand, None)
        retCode = ExecuteCommand(uninstallCommand, None)
        retCode = ExecuteCommand(installCommand, sys.stdout)
        ExecuteCommand(clearLogCatCommand, None)
        retCode = ExecuteTest(runCommand, filenameNoExt, True)
        # wait for the app process to exit before reading its logcat
        AndroidADBCheckRunningProcess(grepPSCommand, filenameNoExt, apkName)
        time.sleep(2)
        output = ExecuteCommandWOutput(logCatCommand)
        output = (b"\n".join(output).decode('utf-8'))
        print(output)
        if "Success terminating application" not in output:
            # no clean-exit marker: move the entry from successes to failures
            retCode = 1
            lastSuccess = successfulTests.pop()
            failedTests.append({'name':lastSuccess['name'], 'gpu':lastSuccess['gpu'], 'reason':"Runtime Failure"})
        else : retCode = 0
        ExecuteCommand(stopAppCommand, sys.stdout)
        #leaksDetected = FindMemoryLeaks(memleakFile)
        # if retCode == 0:# and leaksDetected == True:
        if retCode != 0:
            print("Error while running " + filenameNoExt)
            errorOccured = True
        os.chdir(currDir)
    if errorOccured == True:
        return -1
    return 0
#this needs the JAVA_HOME environment variable set up correctly
def BuildAndroidProjects(skipDebug, skipRelease, printMSBuild):
    """Build the Android (ARM64) solutions / Jenkins MSBuild tasks.

    skipDebug / skipRelease -- drop the corresponding configuration.
    printMSBuild -- raise MSBuild verbosity for full build output.

    Returns 0 on success, -1 if any build failed; exits if MSBuild is missing.
    Requires JAVA_HOME to be set correctly.
    """
    errorOccured = False
    msBuildPath = FindMSBuild17()
    androidConfigurations = ["Debug", "Release"]
    androidPlatform = ["ARM64"]
    if skipDebug:
        androidConfigurations.remove("Debug")
    if skipRelease:
        androidConfigurations.remove("Release")
    if msBuildPath == "":
        print("Could not find MSBuild 17, Is Visual Studio 17 installed ?")
        sys.exit(-1)
    projects = GetFilesPathByExtension("./Jenkins/","buildproj",False)
    fileList = []
    for proj in projects:
        if "Android" in proj:
            fileList.append(proj)
    #if MSBuild tasks were not found then parse all projects
    if len(fileList) == 0:
        fileList = GetFilesPathByExtension("./Examples_3/Unit_Tests/Android_VisualStudio2017/","sln",False)
    msbuildVerbosity = "/verbosity:minimal"
    msbuildVerbosityClp = "/clp:ErrorsOnly;WarningsOnly;Summary"
    if printMSBuild:
        msbuildVerbosity = "/verbosity:normal"
        msbuildVerbosityClp = "/clp:Summary;PerformanceSummary"
    for proj in fileList:
        #get current path for sln file
        #strip the . from ./ in the path
        #replace / by the os separator in case we need // or \\
        rootPath = os.getcwd() + proj.strip('.')
        rootPath = rootPath.replace("/",os.sep)
        #need to get root folder of path by stripping the filename from path
        rootPath = rootPath.split(os.sep)[0:-1]
        rootPath = os.sep.join(rootPath)
        #save root directory where python is executed from
        currDir = os.getcwd()
        #change working directory to sln file
        os.chdir(rootPath)
        #strip extension
        filename = proj.split(os.sep)[-1]
        # BUGFIX: loop variable renamed from 'platform' -- it shadowed the
        # stdlib platform module used elsewhere in this script.
        for targetPlatform in androidPlatform:
            if ".sln" in proj:
                for conf in androidConfigurations:
                    command = [msBuildPath ,filename,"/p:Configuration="+conf,"/p:Platform=" + targetPlatform,"/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
                    retCode = ExecuteBuild(command, filename, conf, targetPlatform)
            elif ".buildproj" in proj:
                command = [msBuildPath ,filename,"/p:Platform=" + targetPlatform,"/m","/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
                retCode = ExecuteBuild(command, filename, "All Configurations", targetPlatform)
            if retCode != 0:
                errorOccured = True
        os.chdir(currDir)
    if errorOccured == True:
        return -1
    return 0
def BuildWindowsProjects(xboxDefined, xboxOnly, skipDebug, skipRelease, printMSBuild, skipAura, skipDX11, isSwitch):
    """Build PC solutions (and optionally Xbox One / Switch) with MSBuild.

    xboxDefined / xboxOnly -- include (or only build) Xbox One projects.
    skipDebug / skipRelease / skipDX11 -- drop the matching configurations.
    printMSBuild -- raise MSBuild verbosity for full build output.
    skipAura -- skip the Aura solution entirely.
    isSwitch -- build the NX solutions instead of the PC ones.

    Returns 0 on success, -1 if any build failed; exits if MSBuild is missing.
    """
    errorOccured = False
    msBuildPath = FindMSBuild17()
    if msBuildPath == "":
        print("Could not find MSBuild 17, Is Visual Studio 17 installed ?")
        sys.exit(-1)
    pcConfigurations = ["DebugDx", "ReleaseDx", "DebugVk", "ReleaseVk", "DebugDx11", "ReleaseDx11"]
    pcPlatform = "x64"
    isWindows7 = int(platform.release()) < 10
    if skipDebug:
        pcConfigurations.remove("DebugDx")
        pcConfigurations.remove("DebugVk")
        pcConfigurations.remove("DebugDx11")
    if skipRelease:
        pcConfigurations.remove("ReleaseDx")
        pcConfigurations.remove("ReleaseVk")
        pcConfigurations.remove("ReleaseDx11")
    if skipDX11:
        if "DebugDx11" in pcConfigurations : pcConfigurations.remove("DebugDx11")
        if "ReleaseDx11" in pcConfigurations : pcConfigurations.remove("ReleaseDx11")
    if isSwitch:
        # NOTE(review): overrides the skipDebug/skipRelease filtering above --
        # confirm whether Switch builds should honor those flags too.
        pcConfigurations = ["DebugVK", "ReleaseVK"]
        switchPlatform = "NX64"
    if isWindows7:
        print("Detected Windows 7")
        if "DebugDx" in pcConfigurations : pcConfigurations.remove("DebugDx")
        if "ReleaseDx" in pcConfigurations : pcConfigurations.remove("ReleaseDx")
        skipAura = True
    xboxPlatform = "Durango"
    if isSwitch:
        projects = GetFilesPathByExtension("./Switch/Examples_3/","sln",False)
    else:
        projects = GetFilesPathByExtension("./Jenkins/","buildproj",False)
    #if MSBuild tasks were not found then parse all projects
    if len(projects) == 0:
        projects = GetFilesPathByExtension("./Examples_3/","sln",False)
    fileList = []
    msbuildVerbosity = "/verbosity:minimal"
    msbuildVerbosityClp = "/clp:ErrorsOnly;WarningsOnly;Summary"
    if printMSBuild:
        msbuildVerbosity = "/verbosity:normal"
        msbuildVerbosityClp = "/clp:Summary;PerformanceSummary"
    if not xboxOnly and not isSwitch:
        for proj in projects:
            if skipAura == True and "Aura" in proj:
                continue
            if "Android" in proj:
                continue
            if isWindows7 == True and "HLSLParser" in proj:
                continue
            if isWindows7 == True:
                if ".buildproj" in proj and "Win7" in proj:
                    fileList.append(proj)
                elif "Win7" in proj:
                    continue
            #we don't want to build Xbox one solutions when building PC
            elif "Xbox" not in proj and "XBOXOne" not in proj:
                fileList.append(proj)
    if xboxDefined:
        for proj in projects:
            if skipAura == True and "Aura" in proj:
                continue
            if "Xbox" in proj or "XBOXOne" in proj:
                fileList.append(proj)
    if isSwitch:
        for proj in projects:
            if "Switch" in proj or "NX Visual Studio 2017" in proj:
                fileList.append(proj)
    for proj in fileList:
        if "orbis" in proj.lower():
            continue
        #get current path for sln file
        #strip the . from ./ in the path
        #replace / by the os separator in case we need // or \\
        rootPath = os.getcwd() + proj.strip('.')
        rootPath = rootPath.replace("/",os.sep)
        #need to get root folder of path by stripping the filename from path
        rootPath = rootPath.split(os.sep)[0:-1]
        rootPath = os.sep.join(rootPath)
        #save root directory where python is executed from
        currDir = os.getcwd()
        #change working directory to sln file
        os.chdir(rootPath)
        # BUGFIX: copy the list -- the original aliased pcConfigurations, so
        # the per-project remove() calls below leaked into every subsequent
        # project's configuration list.
        configurations = list(pcConfigurations)
        #strip extension
        filename = proj.split(os.sep)[-1]
        #hard code the configurations for Aura for now as it's not implemented for Vulkan runtime
        if filename == "Aura.sln":
            if "DebugVk" in configurations : configurations.remove("DebugVk")
            if "ReleaseVk" in configurations : configurations.remove("ReleaseVk")
            if "DebugDx11" in configurations : configurations.remove("DebugDx11")
            if "ReleaseDx11" in configurations : configurations.remove("ReleaseDx11")
        elif filename == "VisibilityBuffer.sln" or filename == "Ephemeris.sln":
            if "DebugDx11" in configurations : configurations.remove("DebugDx11")
            if "ReleaseDx11" in configurations : configurations.remove("ReleaseDx11")
        elif filename == "HLSLParser.sln":
            configurations = ["Debug", "Release"]
        if "Xbox" in proj or "XBOXOne" in proj:
            currPlatform = xboxPlatform
        elif "Switch" in proj or "NX Visual Studio 2017" in proj:
            currPlatform = switchPlatform
        else:
            currPlatform = pcPlatform
        if ".sln" in filename:
            for conf in configurations:
                if isSwitch:
                    command = [msBuildPath ,filename,"/p:Configuration="+conf,"/p:Platform=" + currPlatform,"/p:BuildInParallel=true","/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
                else:
                    command = [msBuildPath ,filename,"/p:Configuration="+conf,"/p:Platform=" + currPlatform,"/m","/p:BuildInParallel=true","/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
                if isWindows7:
                    command.append("/p:WindowsTargetPlatformVersion=8.1")
                retCode = ExecuteBuild(command, filename,conf, currPlatform)
        else:
            command = [msBuildPath ,filename,"/p:Platform=" + currPlatform,"/m", "/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
            if isWindows7:
                command.append("/p:WindowsTargetPlatformVersion=8.1")
            retCode = ExecuteBuild(command, filename,"All Configurations", currPlatform)
        if retCode != 0:
            errorOccured = True
        os.chdir(currDir)
    if errorOccured == True:
        return -1
    return 0
def BuildOrbisProjects(skipDebug, skipRelease, printMSBuild):
    """Build the PS4 (ORBIS) solutions / Jenkins MSBuild tasks.

    skipDebug / skipRelease -- drop the corresponding configuration.
    printMSBuild -- raise MSBuild verbosity for full build output.

    Returns 0 on success, -1 if any build failed; exits if MSBuild is missing.
    """
    errorOccured = False
    msBuildPath = FindMSBuild17()
    configurations = ["Debug", "Release"]
    # BUGFIX: renamed from 'platform' -- it shadowed the stdlib platform
    # module used elsewhere in this script.
    orbisPlatform = "ORBIS"
    if skipDebug:
        configurations.remove("Debug")
    if skipRelease:
        configurations.remove("Release")
    if msBuildPath == "":
        print("Could not find MSBuild 17, Is Visual Studio 17 installed ?")
        sys.exit(-1)
    projects = GetFilesPathByExtension("./Jenkins/","buildproj",False)
    #if MSBuild tasks were not found then parse all projects
    if len(projects) == 0:
        projects = GetFilesPathByExtension("./Examples_3/","sln",False)
    fileList = []
    msbuildVerbosity = "/verbosity:minimal"
    msbuildVerbosityClp = "/clp:ErrorsOnly;WarningsOnly;Summary"
    if printMSBuild:
        msbuildVerbosity = "/verbosity:normal"
        msbuildVerbosityClp = "/clp:Summary;PerformanceSummary"
    for proj in projects:
        if "Aura" in proj:
            continue
        if "Orbis" in proj:
            fileList.append(proj)
    for proj in fileList:
        #get current path for sln file
        #strip the . from ./ in the path
        #replace / by the os separator in case we need // or \\
        rootPath = os.getcwd() + proj.strip('.')
        rootPath = rootPath.replace("/",os.sep)
        #need to get root folder of path by stripping the filename from path
        rootPath = rootPath.split(os.sep)[0:-1]
        rootPath = os.sep.join(rootPath)
        #save root directory where python is executed from
        currDir = os.getcwd()
        #change working directory to sln file
        os.chdir(rootPath)
        #strip extension
        filename = proj.split(os.sep)[-1]
        # BUGFIX: per-project copy so the special cases below can no longer
        # leak configuration changes into later projects.
        projConfigurations = list(configurations)
        #hard code the configurations for Aura for now as it's not implemented for Vulkan runtime
        if filename == "Aura.sln":
            if "DebugVk" in projConfigurations : projConfigurations.remove("DebugVk")
            if "ReleaseVk" in projConfigurations : projConfigurations.remove("ReleaseVk")
            if "DebugDx11" in projConfigurations : projConfigurations.remove("DebugDx11")
            if "ReleaseDx11" in projConfigurations : projConfigurations.remove("ReleaseDx11")
        elif filename == "VisibilityBuffer.sln":
            if "DebugDx11" in projConfigurations : projConfigurations.remove("DebugDx11")
            if "ReleaseDx11" in projConfigurations : projConfigurations.remove("ReleaseDx11")
        elif filename == "HLSLParser.sln":
            projConfigurations = ["Debug", "Release"]
        if ".sln" in filename:
            for conf in projConfigurations:
                command = [msBuildPath ,filename,"/p:Configuration="+conf,"/p:Platform=" + orbisPlatform,"/m","/p:BuildInParallel=true","/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
                retCode = ExecuteBuild(command, filename,conf, orbisPlatform)
        else:
            command = [msBuildPath ,filename,"/p:Platform=" + orbisPlatform,"/m", "/nr:false",msbuildVerbosityClp,msbuildVerbosity,"/t:Build"]
            retCode = ExecuteBuild(command, filename,"All Configurations", orbisPlatform)
        if retCode != 0:
            errorOccured = True
        os.chdir(currDir)
    if errorOccured == True:
        return -1
    return 0
#check memory leak file using regex
#searches for "%d memory leaks found:"
#if it finds that string it will print the contents of the leaks file
#then returns True
#otherwise if no leaks found or the file doesn't exist it returns False
def FindMemoryLeaks(memLeakLog):
    """Check a .memleaks log for a "<N> memory leak(s) found:" header.

    memLeakLog -- path to the memory leak log file.

    Returns True (after printing the relevant section of the log) when leaks
    are reported; False when the file is missing or reports no leaks.
    """
    if not os.path.exists(memLeakLog):
        print("Could not find the memory leak log file.")
        print(memLeakLog)
        return False
    with open(memLeakLog, 'rt') as f:
        lineContents = f.read()
    # Match the leak-count header. BUGFIX: the original pattern was
    # `memory leak+(s)?` which means "lea" + one-or-more "k" -- it also
    # matched e.g. "memory leakk"; `leaks?` matches the intended
    # singular/plural forms only.
    leaksMatch = re.findall(r"^(\d+)\s+memory leaks? found:$", lineContents, re.MULTILINE | re.IGNORECASE)
    if len(leaksMatch) > 0:
        # find the ----- separator lines that delimit the report sections
        iteratorMatches = re.finditer(r"(----*.*?)$", lineContents, re.MULTILINE | re.IGNORECASE)
        iterList = list(iteratorMatches)
        print("Detected the following leaks")
        # try to print only the section containing the source of the leaks
        if len(iterList) > 3:
            print (lineContents[iterList[2].start(0):iterList[3].end(0)])
        else:
            # if we failed to locate the section then print the whole file
            print (lineContents)
        return True
    print("No Leaks detected.")
    return False
def CleanupHandler(signum, frame):
    """SIGINT handler: undo testing pre-processor defines, then exit.

    signum / frame -- standard signal-handler arguments (unused).
    """
    global setDefines
    global setMemTracker
    print("Bye.")
    #need to change to rootpath otherwise
    #os won't find the files to modify
    os.chdir(sys.path[0])
    if setDefines == True or setMemTracker == True:
        #Remove all defines for automated testing
        print("Removing defines that got added for automated testing")
        RemoveTestingPreProcessor()
    # BUGFIX: use sys.exit instead of the site-module exit() helper, which
    # is not guaranteed to exist (e.g. under python -S or when frozen).
    sys.exit(1)
#create global variables for the interrupt handler:
# they record whether AddTestingPreProcessor modified the source tree, so
# CleanupHandler knows whether defines must be stripped again on Ctrl-C.
setDefines = False
setMemTracker = False
def MainLogic():
    """Parse command-line options and dispatch the requested build or test
    run for the current platform.

    Exits the process with the aggregated return code (0 on success) so the
    result is visible to Jenkins.
    """
    global setDefines
    global setMemTracker
    global maxIdleTime
    #TODO: Maybe use simpler library for args
    parser = argparse.ArgumentParser(description='Process the Forge builds')
    parser.add_argument('--clean', action="store_true", help='If enabled, will delete all unversioned and untracked files/folder excluding the Art folder.')
    parser.add_argument('--prebuild', action="store_true", help='If enabled, will run PRE_BUILD if assets do not exist.')
    parser.add_argument('--forceprebuild', action="store_true", help='If enabled, will call PRE_BUILD even if assets exist.')
    parser.add_argument('--xbox', action="store_true", help='Enable xbox building')
    parser.add_argument('--xboxonly', action="store_true", help='Enable xbox building')
    parser.add_argument('--switchNX', action="store_true", help='Enable Switch building')
    parser.add_argument('--orbis', action="store_true", default=False, help='Enable orbis building')
    parser.add_argument("--skipiosbuild", action="store_true", default=False, help='Disable iOS building')
    parser.add_argument("--skipmacosbuild", action="store_true", default=False, help='Disable Macos building')
    parser.add_argument("--skipioscodesigning", action="store_true", default=False, help='Disable iOS code signing during build stage')
    parser.add_argument('--testing', action="store_true", help='Test the apps on current platform')
    parser.add_argument('--ios', action="store_true", help='Needs --testing. Enable iOS testing')
    parser.add_argument("--iosid", type=str, default="-1", help='Use a specific ios device. Id taken from ios-deploy --detect.')
    parser.add_argument('--macos', action="store_true", help='Needs --testing. Enable macOS testing')
    parser.add_argument('--android', action="store_true", help='Enable android building')
    parser.add_argument('--defines', action="store_true", help='Enables pre processor defines for automated testing.')
    parser.add_argument('--memtracking', action="store_true", help='Enables pre processor defines for memory tracking.')
    parser.add_argument('--gpuselection', action="store_true", help='Enables pre processor defines for using active gpu determined from activeTestingGpu.cfg.')
    parser.add_argument('--timeout',type=int, default="45", help='Specify timeout, in seconds, before app is killed when testing. Default value is 45 seconds.')
    parser.add_argument('--skipdebugbuild', action="store_true", help='If enabled, will skip Debug build.')
    parser.add_argument('--skipreleasebuild', action="store_true", help='If enabled, will skip Release build.')
    parser.add_argument('--printbuildoutput', action="store_true", help='If enabled, will print output of project builds.')
    parser.add_argument('--skipaura', action="store_true", help='If enabled, will skip building aura.')
    parser.add_argument('--skipdx11', action="store_true", help='If enabled, will skip building DX11.')
    parser.add_argument('--xcodederiveddatapath', type=str, default='Null', help = 'Uses a specific path relative to root of project for derived data. If null then it uses the default location for derived data')
    parser.add_argument('--preserveworkingdir', action="store_true", help='If enabled, will keep working directory as is instead of changing it to path of PyBuild.')
    #TODO: remove the test in parse_args
    arguments = parser.parse_args()
    #if we want to run based on active gpu config
    #we need defines macros
    if arguments.gpuselection:
        arguments.defines = True
    #add cleanup handler in case app gets interrupted
    #keyboard interrupt
    #removing defines
    signal.signal(signal.SIGINT, CleanupHandler)
    #change path to scripts location
    if not arguments.preserveworkingdir:
        os.chdir(sys.path[0])
    returnCode = 0
    # xbox flags only apply when the XDK environment is present
    if (arguments.xbox is not True and arguments.xboxonly is not True) or "XboxOneXDKLatest" not in os.environ:
        arguments.xbox = False
        arguments.xboxonly = False
    #if we doing xbox only make sure the --xbox argument is enabled.
    if arguments.xboxonly:
        arguments.xbox = True
    setDefines = arguments.defines
    setMemTracker = arguments.memtracking
    if setDefines == True or setMemTracker == True:
        AddTestingPreProcessor(arguments.gpuselection)
    #PRE_BUILD step
    #if only the prebuild argument is provided but Art folder exists then PRE_BUILD isn't run
    #if only the forceprebuild argument is provided PRE_BUILD runs even if art folder exists
    #this is good for jenkins as we don't want to call PRE_BUILD if art asset exists
    if arguments.prebuild == True or arguments.forceprebuild == True:
        if os.path.isdir("./Art") == False or arguments.forceprebuild == True:
            if platform.system() == "Windows":
                ExecuteCommand(["PRE_BUILD.bat"], sys.stdout)
            else:
                ExecuteCommand(["sh","PRE_BUILD.command"], sys.stdout)
    systemOS = platform.system()
    if arguments.testing:
        maxIdleTime = max(arguments.timeout,1)
        #Test for Mac OS (Darwin system)
        if systemOS == "Darwin":
            returnCode = TestXcodeProjects(arguments.ios, arguments.macos, arguments.iosid)
        elif systemOS == "Windows":
            if arguments.orbis == True:
                returnCode = TestOrbisProjects()
            elif arguments.xbox == True:
                returnCode = TestXboxProjects()
            elif arguments.switchNX == True:
                returnCode = TestNintendoSwitchProjects()
            elif arguments.android == True:
                returnCode = TestAndroidProjects()
            else:
                returnCode = TestWindowsProjects(arguments.gpuselection)
        elif systemOS.lower() == "linux" or systemOS.lower() == "linux2":
            returnCode = TestLinuxProjects()
    else:
        #Clean before Building removing everything but the art folder
        if arguments.clean == True:
            print("Cleaning the repo")
            ExecuteCommand(["git", "clean" , "--exclude=Art","--exclude=/**/OpenSource/*", "-fdx"],sys.stdout)
            ExecuteCommand(["git", "submodule", "foreach", "--recursive","git clean -fdfx"], sys.stdout)
        #Build for Mac OS (Darwin system)
        if systemOS== "Darwin":
            returnCode = BuildXcodeProjects(arguments.skipmacosbuild,arguments.skipiosbuild, arguments.skipioscodesigning, arguments.skipdebugbuild, arguments.skipreleasebuild, arguments.printbuildoutput, arguments.xcodederiveddatapath)
        elif systemOS == "Windows":
            if arguments.android:
                returnCode = BuildAndroidProjects(arguments.skipdebugbuild, arguments.skipreleasebuild, arguments.printbuildoutput)
            elif arguments.orbis:
                returnCode = BuildOrbisProjects(arguments.skipdebugbuild, arguments.skipreleasebuild, arguments.printbuildoutput)
            else:
                returnCode = BuildWindowsProjects(arguments.xbox, arguments.xboxonly, arguments.skipdebugbuild, arguments.skipreleasebuild, arguments.printbuildoutput, arguments.skipaura, arguments.skipdx11, arguments.switchNX)
        elif systemOS.lower() == "linux" or systemOS.lower() == "linux2":
            returnCode = BuildLinuxProjects()
    PrintResults()
    #Clean up
    if arguments.defines:
        print("Removing defines that got added for automated testing")
        RemoveTestingPreProcessor()
    #return for jenkins
    sys.exit(returnCode)
if __name__ == "__main__":
	# Script entry point: run the build/test driver defined above.
	MainLogic()
|
from league_api.api import ApiType
from typing import List, Mapping
class ChampionMastery(ApiType):
    """Champion-Mastery DTO (Riot champion-mastery-v4 API).

    Attribute names mirror the camelCase JSON payload so ApiType can map
    fields directly; snake_case read/write aliases are exposed as
    properties below for Pythonic access.
    """
    chestGranted: bool = None # Is chest granted for this champion or not in current season.
    championLevel: int = None # Champion level for specified player and champion combination.
    championPoints: int = None # Total number of champion points for this player and champion combination - they are used to determine championLevel.
    championId: int = None # Champion ID for this entry.
    championPointsUntilNextLevel: int = None # Number of points needed to achieve next level. Zero if player reached maximum champion level for this champion.
    lastPlayTime: int = None # Last time this champion was played by this player - in Unix milliseconds time format.
    tokensEarned: int = None # The token earned for this champion to levelup.
    championPointsSinceLastLevel: int = None # Number of points earned since current level has been achieved.
    summonerId: str = None # Summoner ID for this entry. (Encrypted)
    # snake_case accessors delegating to the camelCase payload fields above.
    @property
    def chest_granted(self):
        return self.chestGranted
    @chest_granted.setter
    def chest_granted(self, value):
        self.chestGranted = value
    @property
    def champion_level(self):
        return self.championLevel
    @champion_level.setter
    def champion_level(self, value):
        self.championLevel = value
    @property
    def champion_points(self):
        return self.championPoints
    @champion_points.setter
    def champion_points(self, value):
        self.championPoints = value
    @property
    def champion_id(self):
        return self.championId
    @champion_id.setter
    def champion_id(self, value):
        self.championId = value
    @property
    def champion_points_until_next_level(self):
        return self.championPointsUntilNextLevel
    @champion_points_until_next_level.setter
    def champion_points_until_next_level(self, value):
        self.championPointsUntilNextLevel = value
    @property
    def last_play_time(self):
        return self.lastPlayTime
    @last_play_time.setter
    def last_play_time(self, value):
        self.lastPlayTime = value
    @property
    def tokens_earned(self):
        return self.tokensEarned
    @tokens_earned.setter
    def tokens_earned(self, value):
        self.tokensEarned = value
    @property
    def champion_points_since_last_level(self):
        return self.championPointsSinceLastLevel
    @champion_points_since_last_level.setter
    def champion_points_since_last_level(self, value):
        self.championPointsSinceLastLevel = value
    @property
    def summoner_id(self):
        return self.summonerId
    @summoner_id.setter
    def summoner_id(self, value):
        self.summonerId = value
|
#ifndef ossimPlanetQtLegendAnimationPathItem_HEADER
#define ossimPlanetQtLegendAnimationPathItem_HEADER
#include <ossimPlanetQt/ossimPlanetQtLegendItem.h>
#include <osg/AnimationPath>
#include <osg/ref_ptr>
// Legend tree item that wraps an osg::AnimationPath so it can be listed,
// persisted and restored from the planet legend widget.
class ossimPlanetQtLegendAnimationPathItem : public ossimPlanetQtLegendItem
{
public:
   ossimPlanetQtLegendAnimationPathItem(QTreeWidgetItem* item,
                                        const QString& name);
   ossimPlanetQtLegendAnimationPathItem(QTreeWidget* treeWidget,
                                        const QString& s);
   ossimPlanetQtLegendAnimationPathItem();
   // Shared ownership of the path is taken via osg::ref_ptr.
   void setAnimationPath(osg::ref_ptr<osg::AnimationPath> animationPath);
   osg::ref_ptr<osg::AnimationPath> animationPath();
   virtual void deleteLayer(bool deleteThis=false);
   // XML (de)serialization of this legend item.
   // NOTE(review): loadXml's activityList presumably collects operations
   // spawned while loading -- confirm against the base-class contract.
   virtual ossimRefPtr<ossimXmlNode> saveXml()const;
   virtual void loadXml(ossimRefPtr<ossimXmlNode> node,
                        std::vector<ossimPlanetOperation*>& activityList);
   virtual const osg::ref_ptr<ossimPlanetLookAt> getLookAt()const;
   virtual void setLookAt(osg::ref_ptr<ossimPlanetLookAt> lookAt);
   virtual const osg::ref_ptr<ossimPlanetExtents> getExtents()const;
   // String round-trip of the animation path.
   void getAnimationPathAsString(std::string& animationPath);
   void setAnimationPathFromString(const std::string& animationPath);
protected:
   osg::ref_ptr<osg::AnimationPath> theAnimationPath;
};
#endif
|
const walk = 500; // 500px
|
from vetka import db
from enum import Enum
# Display priority levels for goods; same members/values as before
# (low=1, normal=2, high=3), built via the Enum functional API.
Priority = Enum('Priority', [('low', 1), ('normal', 2), ('high', 3)])
# Association table: many-to-many link between goods and the categories
# used as tags.  NOTE(review): the second column is named 'category_id'
# even though the table is 'good_tag' -- confirm this matches migrations.
GoodTag = db.Table('good_tag',
                   db.Column('good_id', db.Integer, db.ForeignKey('good.id')),
                   db.Column('category_id', db.Integer, db.ForeignKey('category.id')))
# Association table: many-to-many link between goods and their reviews.
GoodReview = db.Table('good_review',
                      db.Column('good_id', db.Integer, db.ForeignKey('good.id')),
                      db.Column('review_id', db.Integer, db.ForeignKey('review.id')))
class Good(db.Model):
    """A product ("good") offered in the shop."""
    __tablename__ = 'good'
    id = db.Column(db.Integer, primary_key=True)
    product = db.Column(db.String)
    name = db.Column(db.String)
    description = db.Column(db.String)
    # Primary category (one-to-many side of Category.goods).
    category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
    image = db.Column(db.String)
    name_en = db.Column(db.String)
    price = db.Column(db.Integer)
    # Display priority; values mirror the Priority enum (2 == Priority.normal).
    priority = db.Column(db.Integer, default=2)
    # Soft-delete flag: rows are hidden rather than physically removed.
    deleted = db.Column(db.Boolean, default=False)
    # Extra categories attached as tags (many-to-many via good_tag).
    tags = db.relationship('Category', secondary=GoodTag, lazy='dynamic')
    # Reviews linked to this good (many-to-many via good_review).
    reviews = db.relationship('Review', secondary=GoodReview, lazy='dynamic')
class Category(db.Model):
    """Product category; also usable as a tag on goods."""
    __tablename__ = 'category'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    description = db.Column(db.String)
    name_en = db.Column(db.String)
    # Soft-delete flag: rows are hidden rather than physically removed.
    deleted = db.Column(db.Boolean, default=False)
    # Goods whose primary category is this one (FK good.category_id).
    goods = db.relationship('Good', backref='category', lazy='dynamic')
    # Goods tagged with this category (many-to-many via good_tag).
    goods2 = db.relationship('Good', secondary=GoodTag, lazy='dynamic')
    primary = db.Column(db.Boolean, default=False)
class Review(db.Model):
    """Customer review; the vk_* fields suggest it originates from VK."""
    __tablename__ = 'review'
    id = db.Column(db.Integer, primary_key=True)
    vk_id = db.Column(db.Integer)  # reviewer's VK user id
    # Reviewer's display name at the time the review was first seen.
    vk_first_seen_name = db.Column(db.String, default='Anonymous')
    t_comment = db.Column(db.DateTime)  # timestamp of the comment
    vk_link = db.Column(db.String)
    comment = db.Column(db.String)
    # Soft-delete flag: rows are hidden rather than physically removed.
    deleted = db.Column(db.Boolean, default=False)
    # Goods this review is attached to (many-to-many via good_review).
    goods = db.relationship('Good', secondary=GoodReview, lazy='dynamic')
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['IotHubResourceArgs', 'IotHubResource']
@pulumi.input_type
class IotHubResourceArgs:
    # NOTE: auto-generated by the Pulumi SDK Generator -- keep hand edits to
    # comments/documentation only so regeneration stays a clean diff.
    def __init__(__self__, *,
                 resource_group_name: pulumi.Input[str],
                 sku: pulumi.Input['IotHubSkuInfoArgs'],
                 etag: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input['IotHubPropertiesArgs']] = None,
                 resource_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a IotHubResource resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group that contains the IoT hub.
        :param pulumi.Input['IotHubSkuInfoArgs'] sku: IotHub SKU info
        :param pulumi.Input[str] etag: The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention.
        :param pulumi.Input[str] location: The resource location.
        :param pulumi.Input['IotHubPropertiesArgs'] properties: IotHub properties
        :param pulumi.Input[str] resource_name: The name of the IoT hub.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The resource tags.
        """
        # Required inputs are always recorded.
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "sku", sku)
        # Optional inputs are recorded only when explicitly supplied.
        if etag is not None:
            pulumi.set(__self__, "etag", etag)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
    # The accessors below delegate to pulumi's input-args storage via
    # pulumi.get/pulumi.set; @pulumi.getter(name=...) maps to the wire name.
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group that contains the IoT hub.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter
    def sku(self) -> pulumi.Input['IotHubSkuInfoArgs']:
        """
        IotHub SKU info
        """
        return pulumi.get(self, "sku")
    @sku.setter
    def sku(self, value: pulumi.Input['IotHubSkuInfoArgs']):
        pulumi.set(self, "sku", value)
    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """
        The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention.
        """
        return pulumi.get(self, "etag")
    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The resource location.
        """
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input['IotHubPropertiesArgs']]:
        """
        IotHub properties
        """
        return pulumi.get(self, "properties")
    @properties.setter
    def properties(self, value: Optional[pulumi.Input['IotHubPropertiesArgs']]):
        pulumi.set(self, "properties", value)
    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the IoT hub.
        """
        return pulumi.get(self, "resource_name")
    @resource_name.setter
    def resource_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_name", value)
    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        The resource tags.
        """
        return pulumi.get(self, "tags")
    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class IotHubResource(pulumi.CustomResource):
    # NOTE: auto-generated by the Pulumi SDK Generator -- keep hand edits to
    # comments/documentation only so regeneration stays a clean diff.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[pulumi.InputType['IotHubPropertiesArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 resource_name_: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[pulumi.InputType['IotHubSkuInfoArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        The description of the IoT hub.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] etag: The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention.
        :param pulumi.Input[str] location: The resource location.
        :param pulumi.Input[pulumi.InputType['IotHubPropertiesArgs']] properties: IotHub properties
        :param pulumi.Input[str] resource_group_name: The name of the resource group that contains the IoT hub.
        :param pulumi.Input[str] resource_name_: The name of the IoT hub.
        :param pulumi.Input[pulumi.InputType['IotHubSkuInfoArgs']] sku: IotHub SKU info
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The resource tags.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: IotHubResourceArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        The description of the IoT hub.
        :param str resource_name: The name of the resource.
        :param IotHubResourceArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher between the two typed overloads above.
        resource_args, opts = _utilities.get_resource_args_opts(IotHubResourceArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[pulumi.InputType['IotHubPropertiesArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 resource_name_: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[pulumi.InputType['IotHubSkuInfoArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # A set opts.id means "adopt an existing resource": inputs are not
        # allowed, only __props__ built for the lookup.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = IotHubResourceArgs.__new__(IotHubResourceArgs)
            __props__.__dict__["etag"] = etag
            __props__.__dict__["location"] = location
            __props__.__dict__["properties"] = properties
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["resource_name"] = resource_name_
            if sku is None and not opts.urn:
                raise TypeError("Missing required property 'sku'")
            __props__.__dict__["sku"] = sku
            __props__.__dict__["tags"] = tags
            # Output-only properties start as None; the engine resolves them.
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Aliases for every other API version of this resource type, so a
        # provider/version change does not replace the underlying resource.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:devices/v20190322:IotHubResource"), pulumi.Alias(type_="azure-native:devices:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20160203:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20160203:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20170119:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20170119:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20170701:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20170701:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20180122:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20180122:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20180401:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20180401:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20181201preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20181201preview:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20190322preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20190322preview:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20190701preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20190701preview:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20191104:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20191104:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20200301:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200301:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20200401:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200401:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20200615:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200615:IotHubResource"), 
pulumi.Alias(type_="azure-native:devices/v20200710preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200710preview:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20200801:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200801:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20200831:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200831:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20200831preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20200831preview:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20210201preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20210201preview:IotHubResource"), pulumi.Alias(type_="azure-native:devices/v20210303preview:IotHubResource"), pulumi.Alias(type_="azure-nextgen:devices/v20210303preview:IotHubResource")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(IotHubResource, __self__).__init__(
            'azure-native:devices/v20190322:IotHubResource',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'IotHubResource':
        """
        Get an existing IotHubResource resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # All properties start unknown; the engine reads the live resource.
        __props__ = IotHubResourceArgs.__new__(IotHubResourceArgs)
        __props__.__dict__["etag"] = None
        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["sku"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["type"] = None
        return IotHubResource(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[Optional[str]]:
        """
        The Etag field is *not* required. If it is provided in the response body, it must also be provided as a header per the normal ETag convention.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        The resource location.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output['outputs.IotHubPropertiesResponse']:
        """
        IotHub properties
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def sku(self) -> pulumi.Output['outputs.IotHubSkuInfoResponse']:
        """
        IotHub SKU info
        """
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        The resource tags.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The resource type.
        """
        return pulumi.get(self, "type")
|
define(function () {
    'use strict';
    // Localisation tables for the audio player UI, keyed by locale code.
    // Each table maps the English source string to its translation; '{0}'
    // placeholders are substituted at display time.
    // Fixes: typos in bg_BG ('повереден' -> 'повреден', 'Невъществуващ' ->
    // 'Несъществуващ'), es_ES ('recupersqndo' -> 'recuperando'),
    // it_IT ('corroto' -> 'corrotto') and ru_RU ('данынй' -> 'данный').
    return {
        bg_BG: {
            'Playlist': 'Плейлист',
            'Playback aborted': 'Прекратено изпълнение',
            'Network or communication error': 'Проблем с връзка към мрежа',
            'Decoding failed. Corruption or unsupported media': 'Провалено декодиране, повреден файл или неподдържан формат',
            'Media source not supported': 'Източника на медия не се поддържа',
            'Failed to play file': 'Изпълнението на файла се провали',
            'Artist': 'Изпълнител',
            'Album': 'Албум',
            'Track': 'Песен',
            'Time': 'Време',
            'Media information query failed': 'Получаване на информация провалено',
            'seek unavailable in format': 'Несъществуващ формат',
            'The audio type is not supported: {0}': 'Аудио формата не се поддържа'
        },
        de_DE: {
            'Playlist': 'Wiedergabeliste',
            'Playback aborted': 'Wiedergabe abgebrochen',
            'Network or communication error': 'Netzwerk Kommunikationsfehler',
            'Decoding failed. Corruption or unsupported media': 'Dekodierung gescheitert. Fehlerhafte oder nicht unterstützte Datei',
            'Media source not supported': 'Medienquelle nicht unterstützt',
            'Failed to play file': 'Wiedergabe der Datei gescheitert',
            'Artist': 'Künstler',
            'Album': 'Album',
            'Track': 'Titel',
            'Time': 'Zeit',
            'Media information query failed': 'Media Informationssuche gescheitert',
            'seek unavailable in format': 'Spulen im Format nicht verfügbar',
            'The audio type is not supported: {0}': 'Der Audio-Typ {0} ist nicht unterstützt'
        },
        es_ES: {
            'Playlist': 'Lista de reproducción',
            'Playback aborted': 'Playback anulado',
            'Network or communication error': 'Error de red o de comunicación',
            'Decoding failed. Corruption or unsupported media': 'Fallo en el desentrelazado. Medio corrupto o no soportado',
            'Media source not supported': 'Medio no soportado',
            'Failed to play file': 'Error reproduciendo archivo',
            'Artist': 'Artista',
            'Album': 'Album',
            'Track': 'Pista',
            'Time': 'Tiempo',
            'Media information query failed': 'Error recuperando información del medio',
            'seek unavailable in format': 'búsqueda no disponible en este formato',
            'The audio type is not supported: {0}': 'El tipo de audio no está soportado: {0}'
        },
        fr_FR: {
            'Playlist': 'Liste de lecture',
            'Playback aborted': 'Lecture interrompue',
            'Network or communication error': 'Erreur de communication ou de réseau',
            'Decoding failed. Corruption or unsupported media': 'Décodage raté. Média corrompus ou non pris en charge',
            'Media source not supported': 'Source de médias non pris en charge',
            'Failed to play file': 'Impossible de lire le fichier',
            'Artist': 'Artiste',
            'Album': 'Album',
            'Track': 'Piste',
            'Time': 'Durée',
            'Media information query failed': 'Requête des informations média échoué',
            'seek unavailable in format': 'recherche indisponible dans ce format',
            'The audio type is not supported: {0}': "Le type audio n'est pas pris en charge: {0}"
        },
        ar_DZ: {
            'Playlist': 'قائمة القرائة',
            'Playback aborted': 'قطع التشغيل',
            'Network or communication error': 'خطأ في الإتصال بالشبكة',
            'Decoding failed. Corruption or unsupported media': 'فشل في فك التشفير. وسائط غير صالحة أو غير مدعومة',
            'Media source not supported': 'وسائط غير مدعومة',
            'Failed to play file': 'لايمكن قراءة الملف',
            'Artist': 'الفنان',
            'Album': 'الألبوم',
            'Track': 'المقطع',
            'Time': 'المدة',
            'Media information query failed': 'خطأ في قراءة معلومات الوسائط',
            'seek unavailable in format': 'بحث غير ممكن في هذا النوع',
            'The audio type is not supported: {0}': 'نوع الملف الصوتي غير مدعوم: {0}'
        },
        it_IT: {
            'Playlist': 'Playlist',
            'Playback aborted': 'Riproduzione terminata',
            'Network or communication error': 'Errore di rete o di comunicazione',
            'Decoding failed. Corruption or unsupported media': 'Decodifica fallita. Supporto corrotto o non supportato.',
            'Media source not supported': 'Sorgente multimediale non supportata',
            'Failed to play file': 'Riproduzione file fallita',
            'Artist': 'Artista',
            'Album': 'Album',
            'Track': 'Traccia',
            'Time': 'Tempo',
            'Media information query failed': 'Recupero informazioni media fallita',
            'seek unavailable in format': 'ricerca non disponibile nel formato',
            'The audio type is not supported: {0}': 'Tipo di audio non supportato: {0}'
        },
        ko_KR: {
            'Playlist': '재생목록',
            'Playback aborted': '일시중지',
            'Network or communication error': '네트워크 등 통신 문제가 발생했습니다',
            'Decoding failed. Corruption or unsupported media': '디코딩에 실패했습니다. 손상되었거나 지원하지 않는 형식입니다',
            'Media source not supported': '지원하지 않는 미디어 소스입니다',
            'Failed to play file': '파일을 재생하는데 실패했습니다',
            'Artist': '아티스트',
            'Album': '앨범',
            'Track': '트랙',
            'Time': '시간',
            'Media information query failed': '미디어 정보 조회에 실패했습니다',
            'seek unavailable in format': '탐색을 지원하지 않는 형식입니다',
            'The audio type is not supported: {0}': '이 오디오 형식은 지원하지 않습니다: {0}'
        },
        nl_NL: {
            'Playlist': 'Afspeellijst',
            'Playback aborted': 'Afspelen afgebroken',
            'Network or communication error': 'Netwerk of communicatie fout',
            'Decoding failed. Corruption or unsupported media': 'Decoderen mislukt: bestandstype wordt niet ondersteund',
            'Media source not supported': 'Mediabron wordt niet ondersteund',
            'Failed to play file': 'Afspelen van bestand mislukt',
            'Artist': 'Artiest',
            'Album': 'Album',
            'Track': 'Nummer',
            'Time': 'Tijd',
            'Media information query failed': 'Zoeken naar media is niet gelukt',
            'seek unavailable in format': 'Voor/achteruit spoelen is niet beschikbaar in dit formaat',
            'The audio type is not supported: {0}': 'Audio type {0} wordt niet ondersteund'
        },
        no_NO: {
            'Playlist': 'Spilleliste',
            'Playback aborted': 'Avspilling avbrutt',
            'Network or communication error': 'Nettverks- eller kommunikasjonsfeil',
            'Decoding failed. Corruption or unsupported media': 'Dekoding feilet. Korrupt eller ustøttet media',
            'Media source not supported': 'Media-kilde ikke støttet',
            'Failed to play file': 'Klarte ikke spille av fil',
            'Artist': 'Artist',
            'Album': 'Album',
            'Track': 'Låt',
            'Time': 'Tid',
            'Media information query failed': 'Media-informasjon forespursel feil',
            'seek unavailable in format': 'spoling utilgjenglig i format',
            'The audio type is not supported: {0}': 'Denne lyd-typen er ikke støttet: {0}'
        },
        pl_PL: {
            'Playlist': 'Playlista',
            'Playback aborted': 'Odtwarzanie Przerwane',
            'Network or communication error': 'Błąd Sieci lub Komunikacji',
            'Decoding failed. Corruption or unsupported media': 'Dekodowanie nie powiodło się. Uszkodzony lub nieobsługiwany plik',
            'Media source not supported': 'Plik nie jest wspierany',
            'Failed to play file': 'Nie można odtworzyć pliku',
            'Artist': 'Artysta',
            'Album': 'Album',
            'Track': 'Ścieżka',
            'Time': 'Czas',
            'Media information query failed': 'Brak informacji',
            'seek unavailable in format': 'Przewijanie nie jest obsługiwane w tym formacie',
            'The audio type is not supported: {0}': 'Ten typ audio nie jest obsługiwany: {0}'
        },
        ru_RU: {
            'Playlist': 'Список воспроизведения',
            'Playback aborted': 'Воспроизведение прервано',
            'Network or communication error': 'Ошибка соединения',
            'Decoding failed. Corruption or unsupported media': 'Не удалось декодировать файл. Файл поврежден или данный формат не поддерживается',
            'Media source not supported': 'Тип файла не поддерживается',
            'Failed to play file': 'Ошибка воспроизведения',
            'Artist': 'Артист',
            'Album': 'Альбом',
            'Track': 'Трек',
            'Time': 'Время',
            'Media information query failed': 'Ошибка в запросе медиа-информации',
            'seek unavailable in format': 'Перемотка недоступна в этом формате',
            'The audio type is not supported: {0}': 'Тип аудио не поддерживается: {0}'
        },
        sk_SK: {
            'Playlist': 'Zoznam skladieb',
            'Playback aborted': 'Prehrávanie prerušené',
            'Network or communication error': 'Chyba v sieťovej komunikácii',
            'Decoding failed. Corruption or unsupported media': 'Dekódovanie sa nepodarilo alebo médium je nepodporované',
            'Media source not supported': 'Zdrojové médium nie je podporované',
            'Failed to play file': 'Chyba pri prehrávaní súboru',
            'Artist': 'Umelec',
            'Album': 'Album',
            'Track': 'Skladba',
            'Time': 'Čas',
            'Media information query failed': 'Chyba pri získavaní informácii o médiu',
            'seek unavailable in format': 'Formát média nepodporuje preskakovanie (seek)',
            'The audio type is not supported: {0}': 'Nepodporovaný formát: {0}'
        },
        tr_TR: {
            'Playlist': 'Oynatma listesi',
            'Playback aborted': 'kayıt çalma/dinleme durduruldu',
            'Network or communication error': 'ağ veya iletişim hatası',
            'Decoding failed. Corruption or unsupported media': 'çözümleme hatası. Bozuk veya çalışmıyor.',
            'Media source not supported': 'medya kaynağı bulunamadı',
            'Failed to play file': 'Oynatma hatası',
            'Artist': 'Artist',
            'Album': 'Album',
            'Track': 'Parça',
            'Time': 'zaman',
            'Media information query failed': 'medya bilgisini elde etmede hata oluştu',
            'seek unavailable in format': 'bu formatta ileri saramazsınız',
            'The audio type is not supported: {0}': 'Bu format desteklenmiyor: {0}'
        },
        vi_VN: {
            'Playlist': 'Danh sách phát',
            'Playback aborted': 'Phát lại bị hủy',
            'Network or communication error': 'Mạng hoặc thông tin liên lạc bị lỗi',
            'Decoding failed. Corruption or unsupported media': 'Giải mã thất bại. Tập tin bị hỏng hoặc không được hỗ trợ',
            'Media source not supported': 'Nguồn phương tiện không được hỗ trợ',
            'Failed to play file': 'Không thể chơi tập tin',
            'Artist': 'Ca sĩ',
            'Album': 'Album',
            'Track': 'Bài hát',
            'Time': 'Thời gian',
            'Media information query failed': 'Truy vấn thông tin tập tin thất bại',
            'seek unavailable in format': 'không tua được trong định dạng này',
            'The audio type is not supported: {0}': 'Loại âm thanh {0} không được hỗ trợ'
        }
    };
});
|
# -*- coding: utf-8 -*-
import scrapy
from douban.items import DoubanItem
class DoubanSpiderSpider(scrapy.Spider):
    """Spider that scrapes the Douban Top-250 movie chart."""
    # Spider name used by `scrapy crawl`.
    name = 'douban_spider'
    # Restrict crawling to the movie site.
    allowed_domains = ['movie.douban.com']
    # Entry-point URL (first page of the chart).
    start_urls = ['https://movie.douban.com/top250']

    def parse(self, response):
        """Parse one chart page: yield one DoubanItem per movie, then
        follow the "next page" link until the chart is exhausted."""
        movie_list = response.xpath("//div[@class='article']//ol[@class='grid_view']//li")
        for i_item in movie_list:
            douban_item = DoubanItem()
            douban_item['movie_name'] = i_item.xpath(".//div[@class='info']/div[@class='hd']/a/span[1]/text()").extract_first()
            # The intro paragraph spans several text nodes; the original code
            # overwrote 'introduce' on every pass so only the LAST line
            # survived.  Normalize whitespace per line and join them all.
            content = i_item.xpath(".//div[@class='info']//div[@class='bd']/p[1]/text()").extract()
            douban_item['introduce'] = "".join("".join(line.split()) for line in content)
            douban_item['star'] = i_item.xpath(".//span[@class='rating_num']/text()").extract_first()
            douban_item['evaluate'] = i_item.xpath(".//div[@class='star']//span[4]/text()").extract_first()
            douban_item['describe'] = i_item.xpath(".//p[@class='quote']//span/text()").extract_first()
            douban_item['serial_number'] = i_item.xpath(".//div[@class='item']//em//text()").extract_first()
            yield douban_item
        # Pagination: the href is relative (e.g. "?start=25").
        # NOTE(review): the '/link/@href' step looks unusual -- confirm the
        # paginator markup; many Douban scrapers use '/a/@href' here.
        next_link = response.xpath("//span[@class='next']/link/@href").extract()
        if next_link:
            yield scrapy.Request("https://movie.douban.com/top250" + next_link[0], callback=self.parse)
|
/*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang("easyimage","ru",{commands:{fullImage:"Изображение во всю ширину",sideImage:"Изображение сбоку",altText:"Изменить альтернативный текст",upload:"Загрузить изображение"},uploadFailed:"Ваше изображение не может быть загружено из-за сетевой ошибки"});
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_FLOW_LAYERS_LAYER_TREE_H_
#define FLUTTER_FLOW_LAYERS_LAYER_TREE_H_
#include <stdint.h>
#include <memory>
#include "flutter/flow/compositor_context.h"
#include "flutter/flow/layers/layer.h"
#include "lib/fxl/macros.h"
#include "lib/fxl/time/time_delta.h"
#include "third_party/skia/include/core/SkSize.h"
namespace flow {
// An immutable description of one frame's layer hierarchy plus the
// bookkeeping (frame size, construction time, debug flags) the compositor
// needs to raster it.
class LayerTree {
 public:
  LayerTree();
  ~LayerTree();
  // Raster includes both Preroll and Paint.
  void Raster(CompositorContext::ScopedFrame& frame,
              bool ignore_raster_cache = false);
  void Preroll(CompositorContext::ScopedFrame& frame,
               bool ignore_raster_cache = false);
#if defined(OS_FUCHSIA)
  // Fuchsia only: forwarded to the Scenic scene update.
  void set_device_pixel_ratio(float device_pixel_ratio) {
    device_pixel_ratio_ = device_pixel_ratio;
  }
  void UpdateScene(SceneUpdateContext& context,
                   scenic_lib::ContainerNode& container);
#endif
  void Paint(CompositorContext::ScopedFrame& frame);
  // Root of the layer hierarchy; the tree owns it (unique_ptr below).
  Layer* root_layer() const { return root_layer_.get(); }
  void set_root_layer(std::unique_ptr<Layer> root_layer) {
    root_layer_ = std::move(root_layer);
  }
  const SkISize& frame_size() const { return frame_size_; }
  void set_frame_size(const SkISize& frame_size) { frame_size_ = frame_size; }
  // Time taken to build this tree; recorded by the caller.
  void set_construction_time(const fxl::TimeDelta& delta) {
    construction_time_ = delta;
  }
  const fxl::TimeDelta& construction_time() const { return construction_time_; }
  // The number of frame intervals missed after which the compositor must
  // trace the rasterized picture to a trace file. Specify 0 to disable all
  // tracing
  void set_rasterizer_tracing_threshold(uint32_t interval) {
    rasterizer_tracing_threshold_ = interval;
  }
  uint32_t rasterizer_tracing_threshold() const {
    return rasterizer_tracing_threshold_;
  }
  // Debug visualisations: draw checkerboards over raster-cached images /
  // offscreen layers so they stand out during development.
  void set_checkerboard_raster_cache_images(bool checkerboard) {
    checkerboard_raster_cache_images_ = checkerboard;
  }
  void set_checkerboard_offscreen_layers(bool checkerboard) {
    checkerboard_offscreen_layers_ = checkerboard;
  }
 private:
  SkISize frame_size_; // Physical pixels.
  std::unique_ptr<Layer> root_layer_;
  fxl::TimeDelta construction_time_;
  uint32_t rasterizer_tracing_threshold_;
  bool checkerboard_raster_cache_images_;
  bool checkerboard_offscreen_layers_;
#if defined(OS_FUCHSIA)
  float device_pixel_ratio_ = 1.f;
#endif
  FXL_DISALLOW_COPY_AND_ASSIGN(LayerTree);
};
} // namespace flow
#endif // FLUTTER_FLOW_LAYERS_LAYER_TREE_H_
|
const {parse, sep, normalize: norm} = require('path')
// Yield the leading run of elements shared by both arrays, walking the
// shorter one and comparing element-by-element against the longer.
// NOTE: consumes (mutates) the longer input array via shift().
function* commonArrayMembers (a, b) {
  let longer = a
  let shorter = b
  if (b.length > a.length) {
    longer = b
    shorter = a
  }
  for (const member of shorter) {
    if (member !== longer.shift()) break
    yield member
  }
}
// Common ancestor of two paths: identical paths are their own ancestor;
// paths on different roots have none (null); otherwise it is the shared
// leading run of normalized path segments.
const commonAncestorPath = (a, b) => {
  if (a === b) return a
  if (parse(a).root !== parse(b).root) return null
  const shared = [...commonArrayMembers(norm(a).split(sep), norm(b).split(sep))]
  return shared.join(sep)
}

// Fold the pairwise ancestor over all supplied paths.
module.exports = (...paths) => paths.reduce((acc, p) => commonAncestorPath(acc, p))
|
'use strict';
import React from 'react';
import {
AppRegistry,
StyleSheet,
Text,
View,
ListView,
Image
} from 'react-native';
class RNHighScores extends React.Component {
constructor(props) {
super(props);
const ds = new ListView.DataSource({ rowHasChanged: (r1, r2) => r1 !== r2 });
this.state = {
dataSource: ds.cloneWithRows([
[{label:"A",image:"blue.png"}, {label:"B",image:"blue.png"},]
])
};
}
render() {
var contents = this.props["scores"].map(
score => <Text key={score.name}>{score.name}:{score.value}{"\n"}</Text>
);
return (
<ListView
dataSource={this.state.dataSource}
renderRow={this.renderRow.bind(this)}
contentContainerStyle={styles.contentViewStyle}
scrollEnabled={false}
/>
);
}
renderRow(rowData) {
return (
<View>
{this.renderList(rowData)}
</View>
);
}
renderList(list){
return list.map( item => this.renderItem(item) );
}
renderItem(item) {
return (
<View style={styles.cellStyle}>
<Image source={{uri:item.image}} style={{width:52,height:52}}/>
<Text>{item.label}</Text>
</View>
);
}
}
// Shared styles for RNHighScores.
// NOTE(review): only `contentViewStyle` (and `cellStyle`, referenced by
// renderItem but not defined here) are used by the visible code;
// `container`, `highScoresTitle` and `scores` appear unused — confirm
// against other callers before removing.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    backgroundColor: '#FFFFFF',
  },
  highScoresTitle: {
    fontSize: 20,
    textAlign: 'center',
    margin: 10,
  },
  scores: {
    textAlign: 'center',
    color: '#333333',
    marginBottom: 5,
  },
  // Lays the cells out as a wrapping horizontal grid.
  contentViewStyle: {
    flexDirection: 'row',
    flexWrap: 'wrap',
    alignItems:'flex-start',
  },
});
// Name of the overall JS module — must match the component name the
// native host application loads.
AppRegistry.registerComponent('RNHighScores', () => RNHighScores);
|
import numbers
import unittest
import numpy as np
from bio_rtd import peak_shapes, utils
from bio_rtd.uo import surge_tank
from bio_rtd.utils import vectors
from bio_rtd_test.aux_bio_rtd_test import TestLogger
class MockUpNoSimCstr(surge_tank.CSTR):
    """CSTR stub that records which simulation branch `_calculate()` picks
    instead of actually running it (used by ``CstrTest.test_calculate``)."""

    # Flags flipped by the stubbed-out simulation methods below.
    sim_conv = False
    sim_num = False

    def _sim_convolution(self):
        # Each branch may be entered at most once per test run.
        assert not self.sim_conv
        self.sim_conv = True

    def _sim_numerical(self):
        assert not self.sim_num
        self.sim_num = True
def f_constant(self, f_const=1.0):
    """Constant inlet flow-rate profile with value `f_const` over all of `self.t`."""
    return np.full_like(self.t, f_const)
def f_box_shaped(self, f_on=1.0):
    """Box-shaped inlet flow-rate profile.

    Zero everywhere except the window from t = 20 up to 95 % of the time
    span, where the value is `f_on`.
    """
    # Bug fix: the profile must start from zeros. The previous
    # `np.ones_like(self.t) * f_on` pre-filled the whole vector with
    # `f_on`, so the window assignment below was a no-op and the result
    # was a constant — not box-shaped — profile.
    _f = np.zeros_like(self.t)
    t_on = 20                       # window start time
    t_off = self.t[-1] * 19 / 20    # window end: 95 % of the time span
    _f[int(round(t_on / self.dt)):int(round(t_off / self.dt))] = f_on
    return _f
# noinspection DuplicatedCode
def f_periodic(self, f_on=1.0):
    """Periodic inlet flow rate: rectangular pulses of height `f_on`.

    Pulses of duration ~5.34 (snapped to the time grid) repeat every
    20.23 time units, starting after a delay of 20 and forced to zero over
    the last 40 time units. Also stores the period metadata
    (`f_period_average`, `t_period`, `t_on`, `i_f_start`) on `self` for
    later assertions.
    """
    _f = np.zeros_like(self.t)
    t_period = 20.23
    t_on = 5.34
    i_on = int(round(t_on / self.dt))
    t_on = i_on * self.dt  # snap pulse duration to the time grid
    t_delay = 20
    t_shorter_end = 40
    t_period_start = np.arange(t_delay, self.t[-1] - t_shorter_end, t_period)
    # Shift start times so the first one lands on a whole time unit.
    dt = int(round(t_period_start[0])) - t_period_start[0]
    t_period_start += dt
    i_f_start = [t_p / self.dt for t_p in t_period_start]
    # Align fractional start indices so the first one is (nearly) integral.
    df_init = round(i_f_start[0]) - i_f_start[0]
    i_f_start = [i + df_init for i in i_f_start]
    for i in i_f_start:
        i_r = int(round(i))
        _f[i_r:i_r + i_on] = f_on
    # Force the tail of the profile to zero.
    _f[self.t.size - int(round(t_shorter_end / self.dt)):] = 0
    self.f_period_average = f_on * i_on * self.dt / t_period
    self.t_period = t_period
    self.t_on = t_on
    self.i_f_start = i_f_start
    return _f
# noinspection DuplicatedCode
def f_periodic_2(self, f_on=1.0):
    # one full period and one clipped
    """Second periodic profile: two pulses (starts at t = 30 and 150) of
    duration 40 with period 120; the second pulse is clipped by the
    zeroed tail. Stores the same period metadata on `self` as
    ``f_periodic``."""
    _f = np.zeros_like(self.t)
    t_period = 120
    t_on = 40
    i_on = int(round(t_on / self.dt))
    t_on = i_on * self.dt  # snap pulse duration to the time grid
    # t_delay = 30
    t_shorter_end = 30
    i_f_start = [t_p / self.dt for t_p in [30, 150]]
    # Align fractional start indices so the first one is (nearly) integral.
    df_init = round(i_f_start[0]) - i_f_start[0]
    i_f_start = [i + df_init for i in i_f_start]
    for i in i_f_start:
        i_r = int(round(i))
        _f[i_r:i_r + i_on] = f_on
    # Force the tail of the profile to zero (clips the second pulse).
    _f[self.t.size - int(round(t_shorter_end / self.dt)):] = 0
    self.f_period_average = f_on * i_on * self.dt / t_period
    self.t_period = t_period
    self.t_on = t_on
    self.i_f_start = i_f_start
    return _f
def c_profile_1_specie(self):
    """Single-specie concentration profile: constant 5.2 with a zero gap
    over time steps 40–109."""
    profile = np.full((1, self.t.size), 5.2)
    profile[0, 40:110] = 0
    return profile
def c_profile_2_species(self):
    """Two-species concentration profile with staggered step changes:
    specie 0 is 0 for the first 20 steps then 1; specie 1 is 2 for the
    first 30 steps then 0."""
    profile = np.empty((2, self.t.size))
    profile[0] = 1.0
    profile[0, :20] = 0.0
    profile[1] = 2.0
    profile[1, 30:] = 0.0
    return profile
class CstrTest(unittest.TestCase):
    """Unit tests for the ideal continuously-stirred-tank-reactor surge
    tank (``surge_tank.CSTR``), including pure-Python reference
    implementations of the convolution and numerical simulations."""

    def setUp(self) -> None:
        # Fresh time vector and CSTR instance for every test.
        self.t = np.linspace(0, 200, 1200)
        self.dt = self.t[1]
        self.uo_id = "cstr"
        self.gui_title = "Ideal CSTR"
        self.cstr = surge_tank.CSTR(self.t, self.uo_id, self.gui_title)
        self.cstr.log = TestLogger()
        # Placeholders populated by the module-level `f_periodic*` helpers.
        self.f_period_average = 0
        self.t_period = 0
        self.i_f_start = 0
        self.t_on = 0

    def assert_positive_value(self, par_name, func):
        """Assert `func` raises AssertionError when attribute `par_name` is
        invalidated (negative number / empty array / flipped bool), then
        restore the original value."""
        v = getattr(self.cstr, par_name)
        # Note: bools are Numbers too, so a bool hits the first branch and
        # is then overridden by the dedicated bool branch below.
        if isinstance(v, numbers.Number):
            setattr(self.cstr, par_name, -1)
        if isinstance(v, np.ndarray):
            setattr(self.cstr, par_name, np.array([]))
        if isinstance(v, bool):
            setattr(self.cstr, par_name, not v)
        with self.assertRaises(AssertionError):
            func()
        setattr(self.cstr, par_name, v)

    def test_init(self):
        """Constructor stores arguments and leaves all sizing undefined (-1)."""
        # test passed parameters
        np.testing.assert_array_equal(self.cstr._t, self.t)
        self.assertEqual(self.cstr.uo_id, self.uo_id)
        self.assertEqual(self.cstr.gui_title, self.gui_title)
        # test default parameters
        # volume
        self.assertEqual(-1, self.cstr.rt_target)
        self.assertEqual(-1, self.cstr.v_void)
        self.assertEqual(-1, self.cstr.v_min)
        self.assertEqual(-1, self.cstr.v_min_ratio)
        # init volume
        self.assertEqual(-1, self.cstr.v_init)
        self.assertEqual(-1, self.cstr.v_init_ratio)
        # init conc
        self.assertTrue(self.cstr.c_init.size == 0)
        # empty start
        self.assertFalse(self.cstr.starts_empty)

    def test_calc_f_out_target_and_t_cycle(self):
        """Outlet-flow target and cycle time for constant, box-shaped and
        periodic inlet profiles."""
        # constant
        self.cstr._f = f_constant(self, 5)
        self.cstr._calc_f_out_target_and_t_cycle()
        self.assertTrue(self.cstr._is_f_in_box_shaped)
        self.assertEqual(5, self.cstr._f_out_target)
        self.assertEqual(0, self.cstr._t_cycle)
        # box shaped
        self.cstr._f = f_box_shaped(self, 15)
        self.cstr._calc_f_out_target_and_t_cycle()
        self.assertTrue(self.cstr._is_f_in_box_shaped)
        self.assertEqual(15, self.cstr._f_out_target)
        self.assertEqual(0, self.cstr._t_cycle)

        def check_periodic():
            # For periodic inlet, the target should match the period-average
            # flow and the cycle time the pulse period (to 0 decimal places).
            self.cstr._calc_f_out_target_and_t_cycle()
            self.assertFalse(self.cstr._is_f_in_box_shaped)
            self.assertAlmostEqual(self.f_period_average,
                                   self.cstr._f_out_target,
                                   0)
            self.assertAlmostEqual(self.t_period, self.cstr._t_cycle, 0)

        # periodic 1
        self.cstr._f = f_periodic(self, 15)
        check_periodic()
        # periodic 2
        self.cstr._f = f_periodic_2(self, 25)
        check_periodic()

    def test_calc_v_void(self):
        """Void-volume calculation honors rt_target < v_min_ratio < v_min
        < v_void priority (each later one overrides, with a warning)."""
        # prepare
        self.cstr._f = f_periodic_2(self, 1.43)
        self.cstr._calc_f_out_target_and_t_cycle()
        # assert: no sizing parameter defined yet -> error
        with self.assertRaises(RuntimeError):
            self.cstr._calc_v_void()

        def calc_delta_v():
            # Volume swing over one cycle at the target outlet flow.
            f_in = self.cstr._f.max()
            return self.cstr._f_out_target \
                * (1 - self.cstr._f_out_target / f_in) \
                * self.cstr._t_cycle

        def use_rt_target():
            self.cstr.rt_target = 10.2
            self.cstr._calc_v_void()
            self.assertEqual(self.cstr.rt_target * self.cstr._f_out_target,
                             self.cstr._v_void)

        def use_v_min_ratio():
            self.cstr.v_min_ratio = 0.2
            # Requires a valid cycle time.
            self.cstr._t_cycle = -1
            with self.assertRaises(AssertionError):
                self.cstr._calc_v_void()
            self.cstr._t_cycle = 15.2
            self.cstr._calc_v_void()
            self.assertEqual(
                calc_delta_v() / (1 - self.cstr.v_min_ratio),
                self.cstr._v_void
            )

        def use_v_min():
            self.cstr.v_min = 14.3
            # Requires a valid cycle time.
            self.cstr._t_cycle = -1
            with self.assertRaises(AssertionError):
                self.cstr._calc_v_void()
            self.cstr._t_cycle = 11.2
            self.cstr._calc_v_void()
            self.assertEqual(
                calc_delta_v() + self.cstr.v_min,
                self.cstr._v_void
            )

        def use_v_void():
            self.cstr.v_void = 22.2
            self.cstr._calc_v_void()
            self.assertEqual(
                self.cstr.v_void,
                self.cstr._v_void
            )

        # calc
        # rt_target
        use_rt_target()
        # v_min_ratio
        with self.assertWarns(Warning):
            # test priority over rt_target
            use_v_min_ratio()
        self.cstr.rt_target = -1
        use_v_min_ratio()
        # v_min
        with self.assertWarns(Warning):  # test parameter priority
            # test priority over v_min_ratio
            use_v_min()
        self.cstr.v_min_ratio = -1
        use_v_min()
        # v_void
        with self.assertWarns(Warning):  # test parameter priority
            # test priority over v_min
            use_v_void()
        self.cstr.v_min = -1
        use_v_void()

    # noinspection DuplicatedCode
    def test_calc_v_init(self):
        """Initial-volume calculation: defaults to v_void (with warning),
        overridden by v_init_ratio, then v_init, then starts_empty."""
        # default: `_v_init = _v_void` & warning
        with self.assertRaises(AssertionError):
            self.cstr._calc_v_init()
        self.cstr._v_void = 1.2
        with self.assertWarns(Warning):
            self.cstr._calc_v_init()
        self.assertEqual(self.cstr._v_void, self.cstr._v_init)
        # v_init_ratio
        self.cstr.v_init_ratio = 0.2
        self.cstr._v_void = -1
        with self.assertRaises(AssertionError):
            self.cstr._calc_v_init()
        self.cstr._v_void = 1.3
        self.cstr._calc_v_init()
        self.assertEqual(self.cstr._v_void * self.cstr.v_init_ratio,
                         self.cstr._v_init)
        self.cstr._v_void = -1
        # v_init
        self.cstr.v_init = 35.2
        with self.assertWarns(Warning):
            # priority over v_init_ratio
            self.cstr._calc_v_init()
        self.assertEqual(self.cstr.v_init, self.cstr._v_init)
        self.cstr.v_init_ratio = -1
        self.cstr._calc_v_init()
        self.assertEqual(self.cstr.v_init, self.cstr._v_init)
        # v_init = 0 is a valid (empty) initial volume
        self.cstr.v_init = 0
        self.cstr._calc_v_init()
        self.assertEqual(self.cstr.v_init, self.cstr._v_init)
        # starts empty
        # to be ignored by the method
        self.cstr.v_init = 335.2
        self.cstr.v_init_ratio = 24.2
        # set starts_empty
        self.cstr.starts_empty = True
        # test results
        self.cstr._calc_v_init()
        self.assertEqual(0, self.cstr._v_init)

    # noinspection DuplicatedCode
    def test_calc_c_init(self):
        """Initial concentrations default to zeros per specie, accept 2-D or
        1-D input, and reject size mismatches."""
        # prepare
        self.cstr._n_species = 2
        # default
        self.cstr._calc_c_init()
        np.testing.assert_array_equal(np.array([[0], [0]]),
                                      self.cstr._c_init)
        # defined
        self.cstr.c_init = np.array([[2.2], [0.3]])
        self.cstr._calc_c_init()
        np.testing.assert_array_equal(np.array([[2.2], [0.3]]),
                                      self.cstr._c_init)
        # defined 2: flat vector gets reshaped to a column
        self.cstr.c_init = np.array([3.2, 0.2])
        self.cstr._calc_c_init()
        np.testing.assert_array_equal(np.array([[3.2], [0.2]]),
                                      self.cstr._c_init)
        # defined wrong: one value for two species
        self.cstr.c_init = np.array([3.2])
        with self.assertRaises(ValueError):
            self.cstr._calc_c_init()

    def sim_convolution(self):
        """Reference implementation: outlet = inlet convolved with a
        single-tank RTD peak. Returns (c_out, peak, rt_mean)."""
        rt = self.cstr._v_void / self.cstr._f_out_target
        t = np.arange(0, min(rt * 10, self.t[-1]), self.dt)
        p = peak_shapes.tanks_in_series(t, rt, 1, self.cstr.log)
        c = utils.convolution.time_conv(self.dt,
                                        self.cstr._c, p,
                                        self.cstr._c_init)
        return c, p, rt

    def test_sim_convolution(self):
        """`_sim_convolution` matches the reference implementation and logs
        the RTD peak and mean residence time."""
        # prepare
        self.cstr._c = c_profile_2_species(self)
        self.cstr._n_species = 2
        data_log = self.cstr.log.get_data_tree(self.cstr.uo_id)
        v_void = 14.5
        f_out_target = 2.3
        c_init = np.array([[2.2], [3.1]])
        # assign
        self.cstr._v_void = v_void
        self.cstr._f_out_target = f_out_target
        self.cstr._is_f_in_box_shaped = True
        self.cstr._c_init = c_init
        # assert parameters
        self.assert_positive_value("_f_out_target", self.cstr._sim_convolution)
        self.assert_positive_value("_v_void", self.cstr._sim_convolution)
        self.assert_positive_value("_c_init", self.cstr._sim_convolution)
        self.assert_positive_value("_f_out_target", self.cstr._sim_convolution)
        self.assert_positive_value("_is_f_in_box_shaped",
                                   self.cstr._sim_convolution)

        def eval_sim_conv() -> (np.ndarray, np.ndarray, float):
            # targets
            c, p, rt = self.sim_convolution()
            # calc
            self.cstr._sim_convolution()
            # compare
            self.assertEqual(rt, data_log["rt_mean"])
            np.testing.assert_array_almost_equal(p, data_log["p_rtd"])
            np.testing.assert_array_almost_equal(c, self.cstr._c)

        # sim 2 species
        # warning due to low temporal resolution
        with self.assertWarns(Warning, msg=f"Warning: Peak shape: integral:"
                                           f" 1.0132418166911727"):
            eval_sim_conv()
        # sim 1 specie
        self.cstr._c = c_profile_1_specie(self)
        self.cstr._n_species = 1
        self.cstr._c_init = np.array([[2.1]])
        # warning due to low temporal resolution
        with self.assertWarns(Warning, msg=f"Warning: Peak shape: integral:"
                                           f" 1.0132418166911727"):
            eval_sim_conv()

    def sim_numerical(self):
        """Reference implementation: explicit mass-balance time stepping.

        Fills the tank from the inlet; outlet turns on when the tank
        reaches `_v_void` and stays on unless the tank runs dry.
        Returns (f_out, c_out).
        """
        # status in cstr
        v = self.cstr._v_init
        m = self.cstr._c_init.flatten() * v
        # init result vectors
        _c = np.zeros_like(self.cstr._c)
        _f = np.zeros_like(self.cstr._f)
        # do not turn off outlet once started
        keep_outlet_on = False
        for i in range(utils.vectors.true_start(self.cstr._f > 0),
                       self.cstr._t.size):
            # fill in
            dv = self.cstr._f[i] * self.dt
            v += dv
            m += self.cstr._c[:, i] * dv
            if v < self.cstr._f_out_target * self.cstr._dt:  # CSTR is empty
                if not keep_outlet_on:  # it was not yet turned on
                    _c[:, i] = 0
                    _f[i] = 0
                else:  # CSTR dry during operation -> shut it down
                    self.cstr.log.i(f"CSTR ran dry during operation"
                                    f" -> shutting down")
                    _c[:, i:] = 0
                    _f[i:] = 0
                    return _f, _c
            elif v < self.cstr._v_void and not keep_outlet_on:
                # Wait until filled.
                _c[:, i] = 0
                _f[i] = 0
            else:  # outlet on
                keep_outlet_on = True
                # calc current concentration
                c = m / v
                # get outlet
                _c[:, i] = c
                _f[i] = self.cstr._f_out_target
                # subtract outlet from cstr
                v -= _f[i] * self.dt
                m -= _f[i] * self.dt * c
        return _f, _c

    def test_sim_numerical(self):
        """`_sim_numerical` matches the reference stepping, and agrees with
        the convolution result when the tank starts full."""
        # prepare
        self.cstr._c = c_profile_2_species(self)
        self.cstr._n_species = 2
        v_void = 12.5
        v_init = 6.1
        f_out_target = 2.4
        c_init = np.array([[3.2], [5.1]])
        # bind
        self.cstr._f_out_target = f_out_target
        self.cstr._v_void = v_void
        self.cstr._v_init = v_init
        self.cstr._c_init = c_init
        # assert parameters
        self.assert_positive_value("_f_out_target", self.cstr._sim_convolution)
        self.assert_positive_value("_v_void", self.cstr._sim_convolution)
        self.assert_positive_value("_v_init", self.cstr._sim_convolution)
        self.assert_positive_value("_c_init", self.cstr._sim_convolution)

        def eval_sim(include_convolution=False, include_init_small_bump=False):
            # prepare
            self.cstr._calc_f_out_target_and_t_cycle()
            if include_init_small_bump:
                # tiny non-zero inlet at t=0 exercises the start detection
                self.cstr._f[0] = 0.002
            if include_convolution:
                # starting full makes the numerical result comparable to
                # the analytic convolution
                self.cstr._v_init = self.cstr._v_void
            else:
                self.cstr._v_init = v_init
            # targets
            if include_convolution:
                f_conv = self.cstr._f.copy()
                c_conv, _, _ = self.sim_convolution()
            f_num, c_num = self.sim_numerical()
            # calc
            self.cstr._sim_numerical()
            # compare
            np.testing.assert_array_almost_equal(f_num, self.cstr._f)
            np.testing.assert_array_almost_equal(c_num, self.cstr._c)
            if include_convolution:
                # noinspection PyUnboundLocalVariable
                i_f_end = utils.vectors.true_end(f_conv > 0)
                np.testing.assert_array_almost_equal(f_conv[:i_f_end],
                                                     self.cstr._f[:i_f_end])
                np.testing.assert_array_almost_equal(c_num[:, :i_f_end],
                                                     self.cstr._c[:, :i_f_end])

        # sim 2 species
        self.cstr._f = f_constant(self, )
        eval_sim(True)
        self.cstr._f = f_periodic(self)
        eval_sim()
        self.cstr._f = f_periodic_2(self)
        eval_sim()
        # sim 1 specie
        self.cstr._c = c_profile_1_specie(self)
        self.cstr._n_species = 1
        self.cstr._c_init = np.array([[2.1]])
        # sim
        self.cstr._f = f_constant(self)
        eval_sim(True)
        self.cstr._f = f_periodic(self)
        eval_sim()
        self.cstr._f = f_periodic_2(self)
        eval_sim()
        # inlet stops halfway -> tank runs dry -> warning
        self.cstr._f = f_periodic(self)
        self.cstr._f[int(round(self.t.size / 2)):] = 0
        with self.assertWarns(Warning):
            eval_sim()
        # special case with small f at start
        self.cstr._f = f_periodic(self)
        v_init = 0
        eval_sim(False, True)

    def test_calculate(self):
        """`_calculate` dispatches to convolution for box-shaped inlet and
        to numerical stepping otherwise, and logs all derived parameters."""
        self.m_cstr = MockUpNoSimCstr(self.t, self.uo_id, self.gui_title)
        self.m_cstr.log = TestLogger()
        self.m_cstr.rt_target = 12.2
        self.m_cstr.v_init = 12.2
        data_log = self.m_cstr.log.get_data_tree(self.uo_id)

        def run_test(assert_conv=True):
            data_log.clear()
            self.m_cstr._calculate()
            # exactly one of the two branches must have run
            self.assertTrue(self.m_cstr.sim_conv is assert_conv)
            self.assertTrue(self.m_cstr.sim_num is not assert_conv)
            self.m_cstr.sim_conv = False
            self.m_cstr.sim_num = False
            # make sure parameters are in log
            self.assertTrue("f_out_target" in data_log.keys())
            self.assertTrue("t_cycle" in data_log.keys())
            self.assertTrue("v_void" in data_log.keys())
            self.assertTrue("v_init" in data_log.keys())
            self.assertTrue("c_init" in data_log.keys())

        self.m_cstr._f = f_constant(self, )
        self.m_cstr._c = c_profile_2_species(self)
        self.m_cstr._n_species = 2
        run_test(True)
        self.m_cstr._f = f_periodic_2(self)
        self.m_cstr._c = c_profile_2_species(self)
        self.m_cstr._n_species = 2
        run_test(False)
        self.m_cstr._f = f_periodic_2(self)
        self.m_cstr._c = c_profile_1_specie(self)
        self.m_cstr._n_species = 1
        self.m_cstr.v_init = 2.2
        run_test(False)
class MockUpNoSimTwoAlternatingCSTRs(surge_tank.TwoAlternatingCSTRs):
    """TwoAlternatingCSTRs stub that records the order in which
    `_calculate()` invokes its calculation steps (1..5) instead of
    running them (used by ``TestTwoAlternatingCSTRs.test_calculate``)."""

    def __init__(self, t: np.ndarray):
        super().__init__(t, "mock_up_a2cstr")
        # Call-order trace appended to by the stubs below.
        self.f_calls = []

    def _calc_f_out_target_and_t_cycle(self):
        self.f_calls.append(1)

    def _calc_t_leftover(self):
        self.f_calls.append(2)

    def _calc_i_switch_list(self):
        self.f_calls.append(3)

    def _simulate_cycle_by_cycle(self):
        self.f_calls.append(4)

    def _ensure_box_shape(self):
        self.f_calls.append(5)
class TestTwoAlternatingCSTRs(unittest.TestCase):
    """Unit tests for the ``surge_tank.TwoAlternatingCSTRs`` unit
    operation (two tanks alternating between collect and discharge)."""

    def setUp(self) -> None:
        # Fresh time vector and unit-operation instance for every test.
        self.t = np.linspace(0, 200, 1200)
        self.dt = self.t[1]
        self.uo_id = "twin_cstr_system"
        self.gui_title = "2 Alternating CSTRs"
        self.a2cstr = surge_tank.TwoAlternatingCSTRs(self.t,
                                                     self.uo_id,
                                                     self.gui_title)
        self.a2cstr.log = TestLogger()
        # Placeholders populated by the module-level `f_periodic*` helpers.
        self.f_period_average = 0
        self.t_period = 0
        self.t_on = 0
        self.i_f_start = []

    def assert_defined_value(self, par_name, func):
        """Assert `func` raises AssertionError when attribute `par_name` is
        missing entirely; restores the attribute afterwards."""
        v = getattr(self.a2cstr, par_name)
        delattr(self.a2cstr, par_name)
        with self.assertRaises(AssertionError):
            func()
        setattr(self.a2cstr, par_name, v)

    def assert_positive_value(self, par_name, func):
        """Assert `func` raises AssertionError when attribute `par_name` is
        set to -1; restores the original value afterwards."""
        v = getattr(self.a2cstr, par_name)
        setattr(self.a2cstr, par_name, -1)
        with self.assertRaises(AssertionError):
            func()
        setattr(self.a2cstr, par_name, v)

    def test_init(self):
        """Constructor stores arguments and leaves sizing undefined (-1)."""
        # Test passed parameters.
        np.testing.assert_array_equal(self.a2cstr._t, self.t)
        self.assertEqual(self.a2cstr.uo_id, self.uo_id)
        self.assertEqual(self.a2cstr.gui_title, self.gui_title)
        # Test default parameters.
        self.assertEqual(1, self.a2cstr.collect_n_periods)
        self.assertEqual(0.9, self.a2cstr.relative_role_switch_time)
        self.assertEqual(-1, self.a2cstr.t_cycle)
        self.assertEqual(-1, self.a2cstr.v_cycle)
        self.assertEqual(-1, self.a2cstr.v_leftover)
        self.assertEqual(-1, self.a2cstr.v_leftover_rel)

    def test_calc_f_out_target_and_t_cycle_constant(self):
        """For box-shaped inlet, exactly one of t_cycle / v_cycle must be
        given; either determines the other via the outlet flow target."""
        def test_non_periodic_flow(f_out):
            self.assertTrue(self.a2cstr._is_flow_box_shaped())
            # Neither parameter defined -> error.
            self.a2cstr.t_cycle = -1
            self.a2cstr.v_cycle = -1
            with self.assertRaises(AssertionError):
                self.a2cstr._calc_f_out_target_and_t_cycle()
            # Both defined -> error.
            self.a2cstr.t_cycle = 15
            self.a2cstr.v_cycle = 15 * f_out
            with self.assertRaises(AssertionError):
                self.a2cstr._calc_f_out_target_and_t_cycle()
            # Only t_cycle.
            self.a2cstr.v_cycle = -1
            self.a2cstr._calc_f_out_target_and_t_cycle()
            self.assertEqual(f_out, self.a2cstr._f_out_target)
            self.assertEqual(15, self.a2cstr._t_cycle)
            # Only v_cycle.
            self.a2cstr.t_cycle = -1
            self.a2cstr.v_cycle = 15 * f_out
            self.a2cstr._calc_f_out_target_and_t_cycle()
            self.assertEqual(f_out, self.a2cstr._f_out_target)
            self.assertEqual(15, self.a2cstr._t_cycle)

        # Constant inlet flow rate.
        self.a2cstr._f = f_constant(self, 5)
        test_non_periodic_flow(5)
        # Box shaped inlet profile.
        self.a2cstr._f = f_box_shaped(self, 15)
        test_non_periodic_flow(15)

    def test_calc_f_out_target_and_t_cycle_periodic(self):
        """For periodic inlet, the target flow is the period average and
        the cycle spans `collect_n_periods` inlet periods."""
        def test_periodic_flow():
            self.a2cstr._calc_f_out_target_and_t_cycle()
            self.assertFalse(self.a2cstr._is_flow_box_shaped())
            self.assertAlmostEqual(
                self.f_period_average,
                self.a2cstr._f_out_target, 0)
            self.assertAlmostEqual(
                self.t_period * self.a2cstr.collect_n_periods,
                self.a2cstr._t_cycle, 0)

        # Periodic inlet flow rate 1.
        self.a2cstr._f = f_periodic(self, 15)
        test_periodic_flow()
        # Periodic inlet flow rate 2.
        self.a2cstr._f = f_periodic_2(self, 25)
        test_periodic_flow()

    def test_calc_t_leftover(self):
        """Leftover-volume time: from v_leftover (absolute) or
        v_leftover_rel (relative to cycle); defining both is an error."""
        def run_test(v_leftover, v_leftover_rel,
                     f_out_target, t_cycle):
            self.a2cstr.v_leftover = v_leftover
            self.a2cstr.v_leftover_rel = v_leftover_rel
            self.a2cstr._f_out_target = f_out_target
            self.a2cstr._t_cycle = t_cycle
            self.assert_defined_value('_f_out_target',
                                      self.a2cstr._calc_t_leftover)
            self.assert_defined_value('_t_cycle',
                                      self.a2cstr._calc_t_leftover)
            if v_leftover > 0 and v_leftover_rel > 0:
                # Mutually exclusive parameters.
                with self.assertRaises(AssertionError):
                    self.a2cstr._calc_t_leftover()
                return
            if v_leftover > 0:
                t_leftover = v_leftover / f_out_target
            elif v_leftover_rel > 0:
                t_leftover = v_leftover_rel * t_cycle
            else:
                t_leftover = 0
            # Call method.
            self.a2cstr._calc_t_leftover()
            # Compare.
            self.assertAlmostEqual(t_leftover, self.a2cstr._t_leftover)

        run_test(3.4, 0, 3.5, 14.5)
        run_test(-1, 0.5, 3.5, 14.5)
        run_test(-1, 0.5, 3.5, 0)
        run_test(0, 1.5, 3.5, 1)
        run_test(1.1, -1, 3.5, 1)
        run_test(1.1, 0, 3.5, 1)
        run_test(3.4, 0.2, 3.5, 14.5)
        run_test(-1, -1, 3.5, 14.5)
        run_test(-1, 0, 3.5, 14.5)
        run_test(0, -1, 3.5, 14.5)

    def test_calc_i_switch_list_constant(self):
        """Role-switch indices for box-shaped inlet: first switch after one
        cycle plus two leftover periods, second after one cycle plus one,
        then regular cycles until the end of `t`."""
        def test_non_periodic_flow(f_out: float,
                                   t_cycle: float,
                                   t_leftovers: float):
            self.assertTrue(self.a2cstr._is_flow_box_shaped())
            self.a2cstr._f_out_target = f_out
            self.a2cstr._t_cycle = t_cycle
            self.a2cstr._t_leftover = t_leftovers
            self.assert_defined_value('_f_out_target',
                                      self.a2cstr._calc_i_switch_list)
            self.assert_defined_value('_t_cycle',
                                      self.a2cstr._calc_i_switch_list)
            self.assert_defined_value('_t_leftover',
                                      self.a2cstr._calc_i_switch_list)
            self.a2cstr._calc_i_switch_list()
            # Reference calculation.
            i_start, i_end = vectors.true_start_and_end(self.a2cstr._f > 0)
            i_cycle = t_cycle / self.dt
            i_leftovers = t_leftovers / self.dt
            i_switch_list = []
            i_switch = i_start + i_cycle + 2 * i_leftovers
            i_switch_list.append(i_switch)
            i_switch += i_cycle + i_leftovers
            i_switch_list.append(i_switch)
            i_switch += i_cycle
            while i_switch < self.t.size:
                i_switch_list.append(i_switch)
                i_switch += i_cycle
            np.testing.assert_array_almost_equal(
                i_switch_list,
                self.a2cstr._i_switch_list
            )

        # Constant inlet flow rate.
        self.a2cstr._f = f_constant(self, 5)
        test_non_periodic_flow(5, 15.4, 0)
        test_non_periodic_flow(5, 15.4, 2)
        # Box shaped inlet profile.
        self.a2cstr._f = f_box_shaped(self, 15)
        test_non_periodic_flow(15, 10.2, 0)
        test_non_periodic_flow(15, 15.4, 0.8)
        test_non_periodic_flow(15, 15.4, 3)

    def test_calc_i_switch_list_periodic(self):
        """Role-switch indices for periodic inlet: switches land after each
        pulse, delayed by `relative_role_switch_time` of the idle gap and
        shifted by the leftover periods."""
        def test_periodic_flow(t_leftovers: float,
                               relative_role_switch_time: float):
            self.assertFalse(self.a2cstr._is_flow_box_shaped())
            self.a2cstr._f_out_target = self.f_period_average
            self.a2cstr._t_cycle = self.t_period
            self.a2cstr._t_leftover = t_leftovers
            self.a2cstr.relative_role_switch_time = relative_role_switch_time
            self.assert_defined_value('_f_out_target',
                                      self.a2cstr._calc_i_switch_list)
            self.assert_defined_value('_t_cycle',
                                      self.a2cstr._calc_i_switch_list)
            self.assert_defined_value('_t_leftover',
                                      self.a2cstr._calc_i_switch_list)
            self.assert_positive_value('relative_role_switch_time',
                                       self.a2cstr._calc_i_switch_list)
            # Should be <= 1.
            self.a2cstr.relative_role_switch_time = 1.1
            with self.assertRaises(AssertionError):
                self.a2cstr._calc_i_switch_list()
            self.a2cstr.relative_role_switch_time = relative_role_switch_time
            # Calc reference.
            i_f_on = self.t_on / self.dt
            i_cycle = self.t_period / self.dt
            i_leftovers = t_leftovers / self.dt
            i_delay = relative_role_switch_time * (i_cycle - i_f_on)
            if i_delay < 2 * i_leftovers:
                # Running function should raise assertion error.
                with self.assertRaises(AssertionError):
                    self.a2cstr._calc_i_switch_list()
                return
            i_switch_list = []
            for i, i_sw in enumerate(self.i_f_start.copy()):
                i_sw += i_delay + i_f_on
                if i == 1:
                    i_sw -= i_leftovers
                elif i > 1:
                    i_sw -= 2 * i_leftovers
                if i_sw >= self.t.size:
                    break
                i_switch_list.append(i_sw)
            # Add extra entries after flow rate ends.
            i_switch = i_switch_list[-1] + i_cycle
            while i_switch < self.t.size:
                i_switch_list.append(i_switch)
                i_switch += i_cycle
            # Run function in the model.
            self.a2cstr._calc_i_switch_list()
            # Compare.
            np.testing.assert_array_almost_equal(
                i_switch_list,
                self.a2cstr._i_switch_list
            )

        # Periodic inlet flow rate 1.
        self.a2cstr._f = f_periodic(self, 15)
        self.a2cstr._calc_f_out_target_and_t_cycle()
        test_periodic_flow(0, 0)
        test_periodic_flow(2, 0.1)
        test_periodic_flow(2., 0.9)
        test_periodic_flow(3., 0)
        # Periodic inlet flow rate 2.
        self.a2cstr._f = f_periodic_2(self, 25)
        self.a2cstr._calc_f_out_target_and_t_cycle()
        test_periodic_flow(0, 0)
        test_periodic_flow(2, 0.1)
        test_periodic_flow(2., 0.9)
        test_periodic_flow(12., 0.9)
        test_periodic_flow(3., 0)

    def run_simulation(self):
        """Run the full cycle-by-cycle simulation on the currently assigned
        `_f` / `_c` and verify volume and per-specie mass balance (inlet =
        outlet + leftover volumes in the two tanks)."""
        f_in = self.a2cstr._f.copy()
        c_in = self.a2cstr._c.copy()
        self.a2cstr._calc_f_out_target_and_t_cycle()
        self.a2cstr._calc_t_leftover()
        self.a2cstr._calc_i_switch_list()
        self.a2cstr._simulate_cycle_by_cycle()
        f_out, c_out = self.a2cstr._f, self.a2cstr._c
        cycles_data = self.a2cstr._log_tree["cycles"]
        # Last two cycles' leftovers belong to the two tanks.
        v1_leftover = cycles_data[-1]["v_after_discharge"]
        v2_leftover = cycles_data[-2]["v_after_discharge"]
        m1_leftover = cycles_data[-1]["m_after_discharge"]
        m2_leftover = cycles_data[-2]["m_after_discharge"]
        # Debug plotting/printing aids kept for manual inspection:
        # from bokeh.plotting import figure, show
        # f = figure()
        # f.line(self.t, f_in)
        # f.line(self.t, f_out)
        # f.circle(cycles_data[-1]["i_start_discharge"] * self.dt, 5)
        # f.circle(cycles_data[-2]["i_start_discharge"] * self.dt, 5)
        # show(f, browser='firefox')
        # print(f_in.sum() * self.dt)
        # print(sum([v['v_after_collection'] for v in cycles_data]))
        # print(f_out.sum() * self.dt + v1_leftover + v2_leftover)
        # print(f_out.sum() * self.dt)
        # print(v1_leftover + v2_leftover)
        # print((f_in.sum() - f_out.sum()) / self.a2cstr._f_out_target)
        # print((f_in.sum() - f_out.sum()
        #        - (v1_leftover + v2_leftover) / self.dt)
        #       / self.a2cstr._f_out_target)
        # print(f_out.mean())
        # print(f_out.max())
        # Mass balance test.
        self.assertAlmostEqual(
            f_in.sum() * self.dt,
            f_out.sum() * self.dt + v1_leftover + v2_leftover
        )
        self.assertAlmostEqual(
            (c_in * f_in[np.newaxis, :]).sum() * self.dt,
            (c_out * f_out[np.newaxis, :]).sum() * self.dt
            + m1_leftover.sum() + m2_leftover.sum()
        )

    def test_simulate_cycle_by_cycle_constant(self):
        """Mass balance holds for constant and box-shaped inlets, including
        leftover volume and end-of-timeline border cases."""
        self.a2cstr.t_cycle = 14.5
        # # Constant inlet flow rate.
        self.a2cstr._f = f_constant(self, 5)
        self.a2cstr._c = c_profile_1_specie(self)
        self.run_simulation()
        self.a2cstr._f = f_constant(self, 5)
        self.a2cstr._c = c_profile_2_species(self)
        self.run_simulation()
        # Box shaped inlet profile.
        self.a2cstr._f = f_box_shaped(self, 15)
        self.a2cstr._c = c_profile_1_specie(self)
        self.run_simulation()
        self.a2cstr._f = f_box_shaped(self, 15)
        self.a2cstr._c = c_profile_2_species(self)
        self.run_simulation()
        self.a2cstr._f = f_box_shaped(self, 15)
        self.a2cstr._c = c_profile_2_species(self)
        self.a2cstr.v_leftover = 1.3
        self.run_simulation()
        # Border case 1 (at end of t).
        self.a2cstr._f = np.ones_like(self.t) * 14
        self.a2cstr._f[-250:] = 0
        self.a2cstr.t_cycle = 50.01
        self.a2cstr._c = c_profile_1_specie(self)
        self.run_simulation()
        # Border case 2 (< 1 time step discharge before t end).
        self.a2cstr._f = np.ones_like(self.t) * 14
        self.a2cstr._f[-650:] = 0
        self.a2cstr.t_cycle = 91.71
        self.a2cstr._c = c_profile_1_specie(self)
        self.run_simulation()

    def test_simulate_cycle_by_cycle_periodic(self):
        """Mass balance holds for both periodic inlet profiles."""
        # Periodic inlet flow rate 1.
        self.a2cstr._f = f_periodic(self, 15)
        self.a2cstr._c = c_profile_1_specie(self)
        self.run_simulation()
        self.a2cstr._f = f_periodic(self, 15)
        self.a2cstr._c = c_profile_2_species(self)
        self.run_simulation()
        # Periodic inlet flow rate 2.
        self.a2cstr._f = f_periodic_2(self, 25)
        self.a2cstr._c = c_profile_1_specie(self)
        self.run_simulation()
        self.a2cstr._f = f_periodic_2(self, 25)
        self.a2cstr._c = c_profile_2_species(self)
        self.run_simulation()

    def test_calculate_dry_run(self):
        """Smoke test: `_calculate()` completes for all inlet/concentration
        combinations (no result assertions)."""
        self.a2cstr.t_cycle = 14.5
        # # Constant inlet flow rate.
        self.a2cstr._f = f_constant(self, 5)
        self.a2cstr._c = c_profile_1_specie(self)
        self.a2cstr._calculate()
        self.a2cstr._f = f_constant(self, 5)
        self.a2cstr._c = c_profile_2_species(self)
        self.a2cstr._calculate()
        # Box shaped inlet profile.
        self.a2cstr._f = f_box_shaped(self, 15)
        self.a2cstr._c = c_profile_1_specie(self)
        self.a2cstr._calculate()
        self.a2cstr._f = f_box_shaped(self, 15)
        self.a2cstr._c = c_profile_2_species(self)
        self.a2cstr._calculate()
        self.a2cstr._f = f_box_shaped(self, 15)
        self.a2cstr._c = c_profile_2_species(self)
        self.a2cstr.v_leftover = 1.3
        self.a2cstr._calculate()
        # Periodic inlet flow rate 1.
        self.a2cstr._f = f_periodic(self, 15)
        self.a2cstr._c = c_profile_1_specie(self)
        self.a2cstr._calculate()
        self.a2cstr._f = f_periodic(self, 15)
        self.a2cstr._c = c_profile_2_species(self)
        self.a2cstr._calculate()
        # Periodic inlet flow rate 2.
        self.a2cstr._f = f_periodic_2(self, 25)
        self.a2cstr._c = c_profile_1_specie(self)
        self.a2cstr._calculate()
        self.a2cstr._f = f_periodic_2(self, 25)
        self.a2cstr._c = c_profile_2_species(self)
        self.a2cstr._calculate()

    def test_ensure_box_shape(self):
        """`_ensure_box_shape` clips tiny ramps at the profile edges and
        tolerates only negligible (~1e-5 relative) internal bumps."""
        def run_test(_f_in, _f_out):
            self.a2cstr._f = _f_in.copy()
            self.a2cstr._ensure_box_shape()
            np.testing.assert_array_almost_equal(
                _f_out,
                self.a2cstr._f
            )

        # Keep.
        f_in = np.ones_like(self.t) * 3.5
        run_test(f_in, f_in)
        # Clip start.
        f_in[0] *= 0.98
        f_out = f_in.copy()
        f_out[0] = 0
        run_test(f_in, f_out)
        # Clip end.
        f_in[-1] *= 0.98
        f_out[-1] = 0
        run_test(f_in, f_out)
        # Clip start + delay start.
        f_in[:10] = 0
        f_in[10] *= 0.98
        f_out[:11] = 0
        run_test(f_in, f_out)
        # Clip end + short end.
        f_in[-10:] = 0
        f_in[-11] *= 0.98
        f_out[-11:] = 0
        run_test(f_in, f_out)
        # Fix minimal bump.
        f_in[20] *= 0.99999
        run_test(f_in, f_out)
        # Fix minimal bump.
        f_in[22] *= 1.00001
        run_test(f_in, f_out)
        # Do not allow bigger bump.
        f_in[25] *= 0.9999
        with self.assertRaises(AssertionError):
            run_test(f_in, f_out)
        # Do not allow bigger bump.
        f_in[25] = f_in[24] * 1.0001
        with self.assertRaises(AssertionError):
            run_test(f_in, f_out)

    def test_calculate(self):
        """`_calculate` invokes the calculation steps in order 1..5
        (verified via the mock-up's call trace)."""
        mu_uo = MockUpNoSimTwoAlternatingCSTRs(self.t)
        mu_uo._calculate()
        self.assertTrue([1, 2, 3, 4, 5], mu_uo.f_calls)
|
/**
* List for data storage
* @module echarts/data/List
*/
define(function (require) {
var UNDEFINED = 'undefined';
var globalObj = typeof window === 'undefined' ? global : window;
var Float64Array = typeof globalObj.Float64Array === UNDEFINED
? Array : globalObj.Float64Array;
var Int32Array = typeof globalObj.Int32Array === UNDEFINED
? Array : globalObj.Int32Array;
var dataCtors = {
'float': Float64Array,
'int': Int32Array,
// Ordinal data type can be string or int
'ordinal': Array,
'number': Array,
'time': Array
};
var Model = require('../model/Model');
var DataDiffer = require('./DataDiffer');
var zrUtil = require('zrender/core/util');
var modelUtil = require('../util/model');
var isObject = zrUtil.isObject;
var TRANSFERABLE_PROPERTIES = [
'stackedOn', 'hasItemOption', '_nameList', '_idList', '_rawData'
];
// Copy list-internal bookkeeping properties (plus any wrapped-method
// names recorded on `b`) from list `b` onto list `a`.
function transferProperties(a, b) {
    var propNames = TRANSFERABLE_PROPERTIES.concat(b.__wrappedMethods || []);
    zrUtil.each(propNames, function (propName) {
        if (!b.hasOwnProperty(propName)) {
            return;
        }
        a[propName] = b[propName];
    });
    a.__wrappedMethods = b.__wrappedMethods;
}
/**
 * Default data provider backed by a plain array.
 * @constructor
 * @param {Array} [dataArray] raw data items; defaults to an empty array.
 */
function DefaultDataProvider(dataArray) {
    this._array = dataArray || [];
}
var defaultProviderProto = DefaultDataProvider.prototype;
// Plain arrays may be mutated by the caller, so this provider is not "pure".
defaultProviderProto.pure = false;
// Number of raw data items.
defaultProviderProto.count = function () {
    return this._array.length;
};
// Raw data item at index `idx` (undefined when out of range).
defaultProviderProto.getItem = function (idx) {
    return this._array[idx];
};
/**
 * @constructor
 * @alias module:echarts/data/List
 *
 * @param {Array.<string|Object>} dimensions
 *        For example, ['someDimName', {name: 'someDimName', type: 'someDimType'}, ...].
 *        Dimensions should be concrete names like x, y, z, lng, lat, angle, radius
 * @param {module:echarts/model/Model} hostModel
 */
var List = function (dimensions, hostModel) {
    dimensions = dimensions || ['x', 'y'];

    var dimensionInfos = {};
    var dimensionNames = [];

    // Normalize each dimension spec (string shorthand or info object)
    // into a full info object keyed by its concrete name.
    for (var i = 0; i < dimensions.length; i++) {
        var rawDim = dimensions[i];
        var dimensionName;
        var dimensionInfo;
        if (typeof rawDim === 'string') {
            dimensionName = rawDim;
            dimensionInfo = {
                name: dimensionName,
                coordDim: dimensionName,
                coordDimIndex: 0,
                stackable: false,
                // Type can be 'float', 'int', 'number'.
                // Default is 'number'; float precision may not be enough.
                type: 'number'
            };
        }
        else {
            dimensionInfo = rawDim;
            dimensionName = dimensionInfo.name;
            dimensionInfo.type = dimensionInfo.type || 'number';
            if (!dimensionInfo.coordDim) {
                dimensionInfo.coordDim = dimensionName;
                dimensionInfo.coordDimIndex = 0;
            }
        }
        dimensionInfo.otherDims = dimensionInfo.otherDims || {};
        dimensionNames.push(dimensionName);
        dimensionInfos[dimensionName] = dimensionInfo;
    }

    /**
     * Concrete dimension names, in declaration order.
     * @readOnly
     * @type {Array.<string>}
     */
    this.dimensions = dimensionNames;

    /**
     * Information of each data dimension, like data type.
     * @type {Object}
     */
    this._dimensionInfos = dimensionInfos;

    /**
     * @type {module:echarts/model/Model}
     */
    this.hostModel = hostModel;

    // Kind of data this list holds (set externally).
    this.dataType;

    /**
     * Indices of the data subset remaining after filtering;
     * this subset is what charts actually render.
     * @type {Array.<number>}
     * @readOnly
     */
    this.indices = [];

    /**
     * Per-dimension value storage.
     * @type {Object.<key, TypedArray|Array>}
     * @private
     */
    this._storage = {};

    /** @type {Array.<string>} */
    this._nameList = [];

    /** @type {Array.<string>} */
    this._idList = [];

    /**
     * Models of data options, stored sparsely to save memory.
     * @type {Array.<module:echarts/model/Model>}
     * @private
     */
    this._optionModels = [];

    /**
     * The list this one is stacked on, if any.
     * @param {module:echarts/data/List}
     */
    this.stackedOn = null;

    /**
     * Global visual properties after visual coding.
     * @type {Object}
     * @private
     */
    this._visual = {};

    /**
     * Global layout properties.
     * @type {Object}
     * @private
     */
    this._layout = {};

    /**
     * Per-item visual properties after visual coding.
     * @type {Array.<Object>}
     * @private
     */
    this._itemVisuals = [];

    /**
     * Per-item layout properties after layout.
     * @type {Array.<Object>}
     * @private
     */
    this._itemLayouts = [];

    /**
     * Graphic elements associated with data items.
     * @type {Array.<module:zrender/Element>}
     * @private
     */
    this._graphicEls = [];

    /**
     * @type {Array.<Array|Object>}
     * @private
     */
    this._rawData;

    /**
     * Cached data extents, keyed by dimension (+ stack flag).
     * @type {Object}
     * @private
     */
    this._extent;
};
var listProto = List.prototype;

listProto.type = 'list';

/**
 * Whether each data item carries its own option object.
 * @type {boolean}
 */
listProto.hasItemOption = true;

/**
 * Resolve a dimension reference to its concrete name.
 * @param {string|number} dim
 *        A concrete name like x, y, z, lng, lat, angle, radius —
 *        or an ordinal index (e.g. getDimension(0) -> 'x' or 'lng' or 'radius').
 * @return {string} Concrete dimension name.
 */
listProto.getDimension = function (dim) {
    return isNaN(dim) ? dim : (this.dimensions[dim] || dim);
};

/**
 * Get type and stackable info of a particular dimension.
 * Returns a clone so callers cannot mutate internal state.
 * @param {string|number} dim Concrete name or ordinal index (see getDimension).
 */
listProto.getDimensionInfo = function (dim) {
    return zrUtil.clone(this._dimensionInfos[this.getDimension(dim)]);
};
/**
 * Initialize from data.
 * Populates per-dimension storage, the filtered index list, and the
 * name/id lists for every data item.
 * @param {Array.<Object|number|Array>} data
 * @param {Array.<string>} [nameList]
 * @param {Function} [dimValueGetter] (dataItem, dimName, dataIndex, dimIndex) => number
 */
listProto.initData = function (data, nameList, dimValueGetter) {
    data = data || [];
    // A plain array is wrapped in the default provider; anything else must
    // already implement the provider interface (getItem/count).
    var isDataArray = zrUtil.isArray(data);
    if (isDataArray) {
        data = new DefaultDataProvider(data);
    }
    if (__DEV__) {
        if (!isDataArray && (typeof data.getItem != 'function' || typeof data.count != 'function')) {
            throw new Error('Inavlid data provider.');
        }
    }
    this._rawData = data;
    // Clear
    var storage = this._storage = {};
    var indices = this.indices = [];
    var dimensions = this.dimensions;
    var dimensionInfoMap = this._dimensionInfos;
    var size = data.count();
    var idList = [];
    var nameRepeatCount = {};
    var nameDimIdx;
    nameList = nameList || [];
    // Init storage: one (possibly typed) array per dimension.
    for (var i = 0; i < dimensions.length; i++) {
        var dimInfo = dimensionInfoMap[dimensions[i]];
        // Remember which dimension provides item names (otherDims.itemName === 0).
        dimInfo.otherDims.itemName === 0 && (nameDimIdx = i);
        var DataCtor = dataCtors[dimInfo.type];
        storage[dimensions[i]] = new DataCtor(size);
    }
    var self = this;
    if (!dimValueGetter) {
        self.hasItemOption = false;
    }
    // Default dim value getter
    dimValueGetter = dimValueGetter || function (dataItem, dimName, dataIndex, dimIndex) {
        var value = modelUtil.getDataItemValue(dataItem);
        // If any dataItem is like { value: 10 }
        if (modelUtil.isDataItemOption(dataItem)) {
            self.hasItemOption = true;
        }
        return modelUtil.converDataValue(
            (value instanceof Array)
                ? value[dimIndex]
                // If value is a single number or something else not array.
                : value,
            dimensionInfoMap[dimName]
        );
    };
    // First pass: fill per-dimension storage and the identity index list.
    for (var i = 0; i < size; i++) {
        // NOTICE: Try not to write things into dataItem
        var dataItem = data.getItem(i);
        // Each data item is value
        // [1, 2]
        // 2
        // Bar chart, line chart which uses category axis
        // only gives the 'y' value. 'x' value is the indices of cateogry
        // Use a tempValue to normalize the value to be a (x, y) value
        // Store the data by dimensions
        for (var k = 0; k < dimensions.length; k++) {
            var dim = dimensions[k];
            var dimStorage = storage[dim];
            // PENDING NULL is empty or zero
            dimStorage[i] = dimValueGetter(dataItem, dim, i, k);
        }
        indices.push(i);
    }
    // Second pass: resolve item names and derive unique ids.
    // Use the name in option and create id
    for (var i = 0; i < size; i++) {
        var dataItem = data.getItem(i);
        if (!nameList[i] && dataItem) {
            if (dataItem.name != null) {
                nameList[i] = dataItem.name;
            }
            else if (nameDimIdx != null) {
                // Fall back to the value of the name-providing dimension.
                nameList[i] = storage[dimensions[nameDimIdx]][i];
            }
        }
        var name = nameList[i] || '';
        // Try using the id in option
        var id = dataItem && dataItem.id;
        if (!id && name) {
            // Use name as id and add counter to avoid same name
            nameRepeatCount[name] = nameRepeatCount[name] || 0;
            id = name;
            if (nameRepeatCount[name] > 0) {
                id += '__ec__' + nameRepeatCount[name];
            }
            nameRepeatCount[name]++;
        }
        id && (idList[i] = id);
    }
    this._nameList = nameList;
    this._idList = idList;
};
/**
 * Number of items in the (possibly filtered) data subset.
 * @return {number}
 */
listProto.count = function () {
    return this.indices.length;
};
/**
 * Get value. Return NaN if idx is out of range.
 * When `stack` is truthy and the dimension is stackable, values of the
 * lists this one is stacked on are accumulated in.
 * @param {string} dim Dim must be concrete name.
 * @param {number} idx
 * @param {boolean} stack
 * @return {number}
 */
listProto.get = function (dim, idx, stack) {
    var storage = this._storage;
    var dataIndex = this.indices[idx];
    // If value not exists
    if (dataIndex == null || !storage[dim]) {
        return NaN;
    }
    var value = storage[dim][dataIndex];
    // FIXME ordinal data type is not stackable
    if (stack) {
        var dimensionInfo = this._dimensionInfos[dim];
        if (dimensionInfo && dimensionInfo.stackable) {
            var stackedOn = this.stackedOn;
            // Walk the whole stackedOn chain, accumulating values of the
            // same sign; mixed-sign values are deliberately not combined.
            while (stackedOn) {
                // Get no stacked data of stacked on
                var stackedValue = stackedOn.get(dim, idx);
                // Considering positive stack, negative stack and empty data
                if ((value >= 0 && stackedValue > 0) // Positive stack
                    || (value <= 0 && stackedValue < 0) // Negative stack
                ) {
                    value += stackedValue;
                }
                stackedOn = stackedOn.stackedOn;
            }
        }
    }
    return value;
};
/**
 * Get value for multi dimensions.
 * @param {Array.<string>} [dimensions] If ignored, using all dimensions.
 * @param {number} idx
 * @param {boolean} stack
 * @return {number}
 */
listProto.getValues = function (dimensions, idx, stack) {
    var values = [];
    // Support the (idx, stack) call form with dimensions omitted.
    if (!zrUtil.isArray(dimensions)) {
        stack = idx;
        idx = dimensions;
        dimensions = this.dimensions;
    }
    for (var i = 0, len = dimensions.length; i < len; i++) {
        values.push(this.get(dimensions[i], idx, stack));
    }
    return values;
};
/**
 * Whether every non-ordinal dimension holds a numeric (non-NaN) value
 * at the given index.
 * @param {number} idx
 * @return {boolean}
 */
listProto.hasValue = function (idx) {
    var dimensions = this.dimensions;
    var dimensionInfos = this._dimensionInfos;
    for (var i = 0, len = dimensions.length; i < len; i++) {
        if (
            // Ordinal type can be string or number
            dimensionInfos[dimensions[i]].type !== 'ordinal'
            && isNaN(this.get(dimensions[i], idx))
        ) {
            return false;
        }
    }
    return true;
};
/**
 * Get extent of data in one dimension.
 * Results are cached per dimension + stack flag.
 * NOTE(review): the cache key does not include `filter`, so an extent
 * computed with a filter can later be served for an unfiltered call
 * (and vice versa) — looks like a latent issue; confirm with callers.
 * @param {string} dim
 * @param {boolean} stack
 * @param {Function} filter
 */
listProto.getDataExtent = function (dim, stack, filter) {
    dim = this.getDimension(dim);
    var dimData = this._storage[dim];
    var dimInfo = this.getDimensionInfo(dim);
    // Stacking only applies to stackable dimensions.
    stack = (dimInfo && dimInfo.stackable) && stack;
    var dimExtent = (this._extent || (this._extent = {}))[dim + (!!stack)];
    var value;
    if (dimExtent) {
        return dimExtent;
    }
    // var dimInfo = this._dimensionInfos[dim];
    if (dimData) {
        var min = Infinity;
        var max = -Infinity;
        // var isOrdinal = dimInfo.type === 'ordinal';
        for (var i = 0, len = this.count(); i < len; i++) {
            value = this.get(dim, i, stack);
            // FIXME
            // if (isOrdinal && typeof value === 'string') {
            //     value = zrUtil.indexOf(dimData, value);
            // }
            if (!filter || filter(value, dim, i)) {
                value < min && (min = value);
                value > max && (max = value);
            }
        }
        return (this._extent[dim + !!stack] = [min, max]);
    }
    else {
        // Unknown dimension: empty extent.
        return [Infinity, -Infinity];
    }
};
/**
 * Get sum of data in one dimension.
 * NaN values are skipped.
 * @param {string} dim
 * @param {boolean} stack
 */
listProto.getSum = function (dim, stack) {
    var dimData = this._storage[dim];
    var sum = 0;
    if (dimData) {
        for (var i = 0, len = this.count(); i < len; i++) {
            var value = this.get(dim, i, stack);
            if (!isNaN(value)) {
                sum += value;
            }
        }
    }
    return sum;
};
/**
 * Retrieve the filtered index whose raw value in `dim` strictly
 * equals `value`.
 * @param {string} dim
 * @param {number} value
 * @return {number} Index into the filtered data, or -1 when not found.
 */
// FIXME Precision of float value
listProto.indexOf = function (dim, value) {
    var dimData = this._storage[dim];
    if (dimData) {
        var indices = this.indices;
        for (var idx = 0, total = indices.length; idx < total; idx++) {
            if (dimData[indices[idx]] === value) {
                return idx;
            }
        }
    }
    return -1;
};
/**
 * Retrieve the filtered index of the first item with the given name.
 * @param {string} name
 * @return {number} Index into the filtered data, or -1 when not found.
 */
listProto.indexOfName = function (name) {
    var nameList = this._nameList;
    var indices = this.indices;
    for (var idx = 0, total = indices.length; idx < total; idx++) {
        if (nameList[indices[idx]] === name) {
            return idx;
        }
    }
    return -1;
};
/**
 * Retrieve the filtered index corresponding to a raw data index.
 * Relies on `indices` being ascending, so a binary search applies.
 * @param {number} rawIndex
 * @return {number} Index into the filtered data, or -1 when not found.
 */
listProto.indexOfRawIndex = function (rawIndex) {
    var indices = this.indices;
    // Fast path: when nothing was filtered, dataIndex === rawIndex.
    var direct = indices[rawIndex];
    if (direct != null && direct === rawIndex) {
        return rawIndex;
    }
    var lo = 0;
    var hi = indices.length - 1;
    while (lo <= hi) {
        var mid = (lo + hi) / 2 | 0;
        var cur = indices[mid];
        if (cur < rawIndex) {
            lo = mid + 1;
        }
        else if (cur > rawIndex) {
            hi = mid - 1;
        }
        else {
            return mid;
        }
    }
    return -1;
};
/**
 * Retrieve the indices of the value(s) nearest to `value` in `dim`.
 * @param {string} dim
 * @param {number} value
 * @param {boolean} stack If given value is after stacked
 * @param {number} [maxDistance=Infinity] Maximum absolute distance allowed.
 * @return {Array.<number>} May contain multiple indices when several
 *         points share the same (nearest) value.
 */
listProto.indicesOfNearest = function (dim, value, stack, maxDistance) {
    var storage = this._storage;
    var dimData = storage[dim];
    var nearestIndices = [];
    if (!dimData) {
        return nearestIndices;
    }
    if (maxDistance == null) {
        maxDistance = Infinity;
    }
    var minDist = Number.MAX_VALUE;
    var minDiff = -1;
    for (var i = 0, len = this.count(); i < len; i++) {
        var diff = value - this.get(dim, i, stack);
        var dist = Math.abs(diff);
        // Bug fix: the original compared the *signed* `diff` against
        // maxDistance, so any point whose data value exceeds `value`
        // (negative diff) passed the max-distance filter no matter how far
        // away it was. The absolute distance must be compared instead.
        if (dist <= maxDistance && dist <= minDist) {
            // For the case of two data are same on xAxis, which has sequence data.
            // Show the nearest index
            // https://github.com/ecomfe/echarts/issues/2869
            if (dist < minDist || (diff >= 0 && minDiff < 0)) {
                minDist = dist;
                minDiff = diff;
                nearestIndices.length = 0;
            }
            nearestIndices.push(i);
        }
    }
    return nearestIndices;
};
/**
 * Map a filtered index back to its raw data index.
 * @param {number} idx
 * @return {number} Raw index, or -1 when out of range.
 */
listProto.getRawIndex = function (idx) {
    var raw = this.indices[idx];
    return raw != null ? raw : -1;
};
/**
 * Get the raw data item behind a filtered index.
 * @param {number} idx
 * @return {number}
 */
listProto.getRawDataItem = function (idx) {
    return this._rawData.getItem(this.getRawIndex(idx));
};
/**
 * Name of the data item at the filtered index ('' when absent).
 * @param {number} idx
 * @return {string}
 */
listProto.getName = function (idx) {
    return this._nameList[this.indices[idx]] || '';
};
/**
 * Id of the data item at the filtered index; falls back to the raw
 * index rendered as a string.
 * @param {number} idx
 * @return {string}
 */
listProto.getId = function (idx) {
    var id = this._idList[this.indices[idx]];
    return id || (this.getRawIndex(idx) + '');
};
// Accept a single dimension name or an array of them; always return an array.
function normalizeDimensions(dimensions) {
    return zrUtil.isArray(dimensions) ? dimensions : [dimensions];
}
/**
 * Data iteration
 * @param {string|Array.<string>}
 * @param {Function} cb
 * @param {boolean} [stack=false]
 * @param {*} [context=this]
 *
 * @example
 *  list.each('x', function (x, idx) {});
 *  list.each(['x', 'y'], function (x, y, idx) {});
 *  list.each(function (idx) {})
 */
listProto.each = function (dims, cb, stack, context) {
    // Support the (cb, stack, context) call form with dims omitted:
    // shift the arguments right.
    if (typeof dims === 'function') {
        context = stack;
        stack = cb;
        cb = dims;
        dims = [];
    }
    dims = zrUtil.map(normalizeDimensions(dims), this.getDimension, this);
    var value = [];
    var dimSize = dims.length;
    var indices = this.indices;
    context = context || this;
    for (var i = 0; i < indices.length; i++) {
        // Simple optimization: avoid the generic apply path for the
        // common 0/1/2-dimension cases.
        switch (dimSize) {
            case 0:
                cb.call(context, i);
                break;
            case 1:
                cb.call(context, this.get(dims[0], i, stack), i);
                break;
            case 2:
                cb.call(context, this.get(dims[0], i, stack), this.get(dims[1], i, stack), i);
                break;
            default:
                for (var k = 0; k < dimSize; k++) {
                    value[k] = this.get(dims[k], i, stack);
                }
                // Index is passed as the last callback argument.
                value[k] = i;
                cb.apply(context, value);
        }
    }
};
/**
 * Data filter. Keeps only the items for which `cb` returns truthy,
 * by rewriting `this.indices` in place (storage is untouched).
 * @param {string|Array.<string>}
 * @param {Function} cb
 * @param {boolean} [stack=false]
 * @param {*} [context=this]
 */
listProto.filterSelf = function (dimensions, cb, stack, context) {
    // Support the (cb, stack, context) call form with dimensions omitted.
    if (typeof dimensions === 'function') {
        context = stack;
        stack = cb;
        cb = dimensions;
        dimensions = [];
    }
    dimensions = zrUtil.map(
        normalizeDimensions(dimensions), this.getDimension, this
    );
    var newIndices = [];
    var value = [];
    var dimSize = dimensions.length;
    var indices = this.indices;
    context = context || this;
    for (var i = 0; i < indices.length; i++) {
        var keep;
        // Simple optimization for the 0/1-dimension cases.
        if (!dimSize) {
            keep = cb.call(context, i);
        }
        else if (dimSize === 1) {
            keep = cb.call(
                context, this.get(dimensions[0], i, stack), i
            );
        }
        else {
            for (var k = 0; k < dimSize; k++) {
                value[k] = this.get(dimensions[k], i, stack);
            }
            // Index is passed as the last callback argument.
            value[k] = i;
            keep = cb.apply(context, value);
        }
        if (keep) {
            newIndices.push(indices[i]);
        }
    }
    this.indices = newIndices;
    // Reset data extent — the cached extents no longer match the subset.
    this._extent = {};
    return this;
};
/**
 * Map the data into a plain array of callback results.
 * @param {string|Array.<string>} [dimensions]
 * @param {Function} cb
 * @param {boolean} [stack=false]
 * @param {*} [context=this]
 * @return {Array}
 */
listProto.mapArray = function (dimensions, cb, stack, context) {
    // Support the (cb, stack, context) call form with dimensions omitted.
    if (typeof dimensions === 'function') {
        context = stack;
        stack = cb;
        cb = dimensions;
        dimensions = [];
    }
    var result = [];
    this.each(dimensions, function () {
        result.push(cb && cb.apply(this, arguments));
    }, stack, context);
    return result;
};
// Build a shallow copy of `original` for use by map()/downSample():
// dimensions listed in `excludeDimensions` get fresh (writable) stores,
// all other dimensions share storage with the original.
function cloneListForMapAndSample(original, excludeDimensions) {
    var allDimensions = original.dimensions;
    var list = new List(
        zrUtil.map(allDimensions, original.getDimensionInfo, original),
        original.hostModel
    );
    // FIXME If needs stackedOn, value may already been stacked
    transferProperties(list, original);
    var storage = list._storage = {};
    var originalStorage = original._storage;
    // Init storage
    for (var i = 0; i < allDimensions.length; i++) {
        var dim = allDimensions[i];
        var dimStore = originalStorage[dim];
        if (zrUtil.indexOf(excludeDimensions, dim) >= 0) {
            // Same constructor (typed array or Array) and length as the original.
            storage[dim] = new dimStore.constructor(
                originalStorage[dim].length
            );
        }
        else {
            // Direct reference for other dimensions
            storage[dim] = originalStorage[dim];
        }
    }
    return list;
}
/**
 * Data mapping to a new List with given dimensions.
 * Only the listed dimensions receive new values; other dimensions share
 * storage with this list (see cloneListForMapAndSample).
 * @param {string|Array.<string>} dimensions
 * @param {Function} cb
 * @param {boolean} [stack=false]
 * @param {*} [context=this]
 * @return {Array}
 */
listProto.map = function (dimensions, cb, stack, context) {
    dimensions = zrUtil.map(
        normalizeDimensions(dimensions), this.getDimension, this
    );
    var list = cloneListForMapAndSample(this, dimensions);
    // Following properties are all immutable.
    // So we can reference to the same value
    var indices = list.indices = this.indices;
    var storage = list._storage;
    // Reused buffer for single-number callback results.
    var tmpRetValue = [];
    this.each(dimensions, function () {
        // each() passes the filtered index as the last argument.
        var idx = arguments[arguments.length - 1];
        var retValue = cb && cb.apply(this, arguments);
        if (retValue != null) {
            // a number
            if (typeof retValue === 'number') {
                tmpRetValue[0] = retValue;
                retValue = tmpRetValue;
            }
            // Write each returned value back to the raw slot of its dimension.
            for (var i = 0; i < retValue.length; i++) {
                var dim = dimensions[i];
                var dimStore = storage[dim];
                var rawIdx = indices[idx];
                if (dimStore) {
                    dimStore[rawIdx] = retValue[i];
                }
            }
        }
    }, stack, context);
    return list;
};
/**
 * Large data down sampling on given dimension.
 * Splits the data into frames of ~1/rate items, then keeps one
 * representative item per frame.
 * @param {string} dimension
 * @param {number} rate
 * @param {Function} sampleValue Reduces a frame's values to one value.
 * @param {Function} sampleIndex Picks which in-frame index represents
 *                               the frame (for name and id).
 */
listProto.downSample = function (dimension, rate, sampleValue, sampleIndex) {
    var list = cloneListForMapAndSample(this, [dimension]);
    var storage = this._storage;
    var targetStorage = list._storage;
    var originalIndices = this.indices;
    var indices = list.indices = [];
    var frameValues = [];
    var frameIndices = [];
    var frameSize = Math.floor(1 / rate);
    var dimStore = targetStorage[dimension];
    var len = this.count();
    // Copy data from original data
    for (var i = 0; i < storage[dimension].length; i++) {
        targetStorage[dimension][i] = storage[dimension][i];
    }
    for (var i = 0; i < len; i += frameSize) {
        // Last frame: may be shorter than frameSize.
        if (frameSize > len - i) {
            frameSize = len - i;
            frameValues.length = frameSize;
        }
        // Collect the frame's values and raw indices.
        for (var k = 0; k < frameSize; k++) {
            var idx = originalIndices[i + k];
            frameValues[k] = dimStore[idx];
            frameIndices[k] = idx;
        }
        var value = sampleValue(frameValues);
        var idx = frameIndices[sampleIndex(frameValues, value) || 0];
        // Only write value on the filtered data
        dimStore[idx] = value;
        indices.push(idx);
    }
    return list;
};
/**
 * Get model of one data item.
 * @param {number} idx Filtered index.
 */
// FIXME Model proxy ?
listProto.getItemModel = function (idx) {
    var hostModel = this.hostModel;
    var rawIdx = this.indices[idx];
    return new Model(
        this._rawData.getItem(rawIdx),
        hostModel,
        hostModel && hostModel.ecModel
    );
};
/**
 * Create a data differ between this list and another.
 * @param {module:echarts/data/List} otherList
 * @return {module:echarts/data/DataDiffer}
 */
listProto.diff = function (otherList) {
    var thisIdList = this._idList;
    var otherIdList = otherList && otherList._idList;
    var val;
    // Items without an explicit id fall back to their index, prefixed so
    // a bare index can never collide with a real id in the other list
    // (which would cause weird update animation).
    var prefix = 'e\0\0';
    return new DataDiffer(
        otherList ? otherList.indices : [],
        this.indices,
        function (idx) {
            return (val = otherIdList[idx]) != null ? val : prefix + idx;
        },
        function (idx) {
            return (val = thisIdList[idx]) != null ? val : prefix + idx;
        }
    );
};
/**
 * Get a global visual property.
 * @param {string} key
 */
listProto.getVisual = function (key) {
    return this._visual && this._visual[key];
};
/**
 * Set global visual property.
 * @param {string|Object} key
 * @param {*} [value]
 *
 * @example
 *  setVisual('color', color);
 *  setVisual({
 *      'color': color
 *  });
 */
listProto.setVisual = function (key, val) {
    // Single key/value pair.
    if (!isObject(key)) {
        this._visual = this._visual || {};
        this._visual[key] = val;
        return;
    }
    // Object form: recurse per own property.
    for (var name in key) {
        if (key.hasOwnProperty(name)) {
            this.setVisual(name, key[name]);
        }
    }
};
/**
 * Set global layout property.
 * @param {string|Object} key
 * @param {*} [val]
 */
listProto.setLayout = function (key, val) {
    // Single key/value pair.
    if (!isObject(key)) {
        this._layout[key] = val;
        return;
    }
    // Object form: recurse per own property.
    for (var name in key) {
        if (key.hasOwnProperty(name)) {
            this.setLayout(name, key[name]);
        }
    }
};
/**
 * Get a global layout property.
 * @param {string} key
 * @return {*}
 */
listProto.getLayout = function (key) {
    return this._layout[key];
};
/**
 * Get layout of a single data item.
 * @param {number} idx
 */
listProto.getItemLayout = function (idx) {
    return this._itemLayouts[idx];
};
/**
 * Set layout of a single data item.
 * @param {number} idx
 * @param {Object} layout
 * @param {boolean=} [merge=false] Merge into the existing layout instead
 *                                 of replacing it.
 */
listProto.setItemLayout = function (idx, layout, merge) {
    this._itemLayouts[idx] = merge
        ? zrUtil.extend(this._itemLayouts[idx] || {}, layout)
        : layout;
};
/**
 * Clear the layout of every data item.
 */
listProto.clearItemLayouts = function () {
    this._itemLayouts.length = 0;
};
/**
 * Get visual property of a single data item, falling back to the global
 * visual unless `ignoreParent` is set.
 * @param {number} idx
 * @param {string} key
 * @param {boolean} [ignoreParent=false]
 */
listProto.getItemVisual = function (idx, key, ignoreParent) {
    var itemVisual = this._itemVisuals[idx];
    var val = itemVisual && itemVisual[key];
    return (val == null && !ignoreParent)
        // Use global visual property
        ? this.getVisual(key)
        : val;
};
/**
 * Set visual property of a single data item.
 *
 * @param {number} idx
 * @param {string|Object} key
 * @param {*} [value]
 *
 * @example
 *  setItemVisual(0, 'color', color);
 *  setItemVisual(0, {
 *      'color': color
 *  });
 */
listProto.setItemVisual = function (idx, key, value) {
    var itemVisual = this._itemVisuals[idx] || {};
    this._itemVisuals[idx] = itemVisual;
    // Single key/value pair.
    if (!isObject(key)) {
        itemVisual[key] = value;
        return;
    }
    // Object form: copy every own property.
    for (var name in key) {
        if (key.hasOwnProperty(name)) {
            itemVisual[name] = key[name];
        }
    }
};
/**
 * Clear item visuals and the global list visual.
 */
listProto.clearAllVisual = function () {
    this._visual = {};
    this._itemVisuals = [];
};
// Traversal callback: stamp the group's data/series identity onto each child.
// `this` is the group element being traversed.
var setItemDataAndSeriesIndex = function (child) {
    child.dataIndex = this.dataIndex;
    child.dataType = this.dataType;
    child.seriesIndex = this.seriesIndex;
};
/**
 * Set the graphic element associated with a data item (may be null).
 * @param {number} idx
 * @param {module:zrender/Element} [el]
 */
listProto.setItemGraphicEl = function (idx, el) {
    if (el) {
        var hostModel = this.hostModel;
        // Stamp data index and series index onto the element so it can be
        // mapped back to its data item (useful in tooltip).
        el.dataIndex = idx;
        el.dataType = this.dataType;
        el.seriesIndex = hostModel && hostModel.seriesIndex;
        if (el.type === 'group') {
            el.traverse(setItemDataAndSeriesIndex, el);
        }
    }
    this._graphicEls[idx] = el;
};
/**
 * @param {number} idx
 * @return {module:zrender/Element}
 */
listProto.getItemGraphicEl = function (idx) {
    return this._graphicEls[idx];
};
/**
 * Iterate the non-null graphic elements.
 * @param {Function} cb
 * @param {*} context
 */
listProto.eachItemGraphicEl = function (cb, context) {
    zrUtil.each(this._graphicEls, function (el, idx) {
        if (el) {
            cb && cb.call(context, el, idx);
        }
    });
};
/**
* Shallow clone a new list except visual and layout properties, and graph elements.
* New list only change the indices.
*/
listProto.cloneShallow = function () {
var dimensionInfoList = zrUtil.map(this.dimensions, this.getDimensionInfo, this);
var list = new List(dimensionInfoList, this.hostModel);
// FIXME
list._storage = this._storage;
transferProperties(list, this);
// Clone will not change the data extent and indices
list.indices = this.indices.slice();
if (this._extent) {
list._extent = zrUtil.extend({}, this._extent);
}
return list;
};
/**
* Wrap some method to add more feature
* @param {string} methodName
* @param {Function} injectFunction
*/
listProto.wrapMethod = function (methodName, injectFunction) {
var originalMethod = this[methodName];
if (typeof originalMethod !== 'function') {
return;
}
this.__wrappedMethods = this.__wrappedMethods || [];
this.__wrappedMethods.push(methodName);
this[methodName] = function () {
var res = originalMethod.apply(this, arguments);
return injectFunction.apply(this, [res].concat(zrUtil.slice(arguments)));
};
};
// Methods that create a new list based on this list should be listed here.
// Notice that those method should `RETURN` the new list.
listProto.TRANSFERABLE_METHODS = ['cloneShallow', 'downSample', 'map'];
// Methods that change indices of this list should be listed here.
listProto.CHANGABLE_METHODS = ['filterSelf'];
return List;
});
|
import {
deviceAccessHook,
setEnforcementConfig,
userSyncHook,
userIdHook,
makeBidRequestsHook,
validateRules,
enforcementRules,
purpose1Rule,
purpose2Rule,
enableAnalyticsHook,
getGvlid,
internal
} from 'modules/gdprEnforcement.js';
import { config } from 'src/config.js';
import adapterManager, { gdprDataHandler } from 'src/adapterManager.js';
import * as utils from 'src/utils.js';
import { validateStorageEnforcement } from 'src/storageManager.js';
import events from 'src/events.js';
describe('gdpr enforcement', function () {
let nextFnSpy;
let logWarnSpy;
let gdprDataHandlerStub;
let staticConfig = {
cmpApi: 'static',
timeout: 7500,
allowAuctionWithoutConsent: false,
consentData: {
getTCData: {
'tcString': 'COuqj-POu90rDBcBkBENAZCgAPzAAAPAACiQFwwBAABAA1ADEAbQC4YAYAAgAxAG0A',
'cmpId': 92,
'cmpVersion': 100,
'tcfPolicyVersion': 2,
'gdprApplies': true,
'isServiceSpecific': true,
'useNonStandardStacks': false,
'purposeOneTreatment': false,
'publisherCC': 'US',
'cmpStatus': 'loaded',
'eventStatus': 'tcloaded',
'outOfBand': {
'allowedVendors': {},
'discloseVendors': {}
},
'purpose': {
'consents': {
'1': true,
'2': true,
'3': true,
'7': true
},
'legitimateInterests': {
'1': false,
'2': true,
'3': false
}
},
'vendor': {
'consents': {
'1': true,
'2': true,
'3': false,
'4': true,
'5': false
},
'legitimateInterests': {
'1': false,
'2': true,
'3': false,
'4': false,
'5': false
}
},
'specialFeatureOptins': {
'1': false,
'2': false
},
'restrictions': {},
'publisher': {
'consents': {
'1': false,
'2': false,
'3': false
},
'legitimateInterests': {
'1': false,
'2': false,
'3': false
},
'customPurpose': {
'consents': {},
'legitimateInterests': {}
}
}
}
}
};
after(function () {
validateStorageEnforcement.getHooks({ hook: deviceAccessHook }).remove();
$$PREBID_GLOBAL$$.requestBids.getHooks().remove();
adapterManager.makeBidRequests.getHooks({ hook: makeBidRequestsHook }).remove();
})
// Tests for the storage-manager hook that gates device (cookie/localStorage)
// access on TCF purpose-1 consent.
describe('deviceAccessHook', function () {
    let adapterManagerStub;

    // Minimal bidder adapter exposing only its GVL id.
    function getBidderSpec(gvlid) {
        return {
            getSpec: () => {
                return {
                    gvlid
                }
            }
        }
    }

    beforeEach(function () {
        nextFnSpy = sinon.spy();
        gdprDataHandlerStub = sinon.stub(gdprDataHandler, 'getConsentData');
        logWarnSpy = sinon.spy(utils, 'logWarn');
        adapterManagerStub = sinon.stub(adapterManager, 'getBidAdapter');
    });
    afterEach(function () {
        config.resetConfig();
        gdprDataHandler.getConsentData.restore();
        logWarnSpy.restore();
        adapterManagerStub.restore();
    });

    it('should not allow device access when device access flag is set to false', function () {
        config.setConfig({
            deviceAccess: false,
            consentManagement: {
                gdpr: {
                    rules: [{
                        purpose: 'storage',
                        enforcePurpose: false,
                        enforceVendor: false,
                        vendorExceptions: ['appnexus', 'rubicon']
                    }]
                }
            }
        });
        deviceAccessHook(nextFnSpy);
        expect(nextFnSpy.calledOnce).to.equal(true);
        // The hook short-circuits: next() is called with valid=false and no
        // gvlid/module arguments.
        let result = {
            hasEnforcementHook: true,
            valid: false
        }
        sinon.assert.calledWith(nextFnSpy, undefined, undefined, result);
    });

    it('should only check for consent for vendor exceptions when enforcePurpose and enforceVendor are false', function () {
        adapterManagerStub.withArgs('appnexus').returns(getBidderSpec(1));
        adapterManagerStub.withArgs('rubicon').returns(getBidderSpec(5));
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: false,
                    enforceVendor: false,
                    vendorExceptions: ['appnexus']
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.gdprApplies = true;
        consentData.apiVersion = 2;
        gdprDataHandlerStub.returns(consentData);
        deviceAccessHook(nextFnSpy, 1, 'appnexus');
        deviceAccessHook(nextFnSpy, 5, 'rubicon');
        // Neither call should produce a consent warning.
        expect(logWarnSpy.callCount).to.equal(0);
    });

    it('should check consent for all vendors when enforcePurpose and enforceVendor are true', function () {
        adapterManagerStub.withArgs('appnexus').returns(getBidderSpec(1));
        adapterManagerStub.withArgs('rubicon').returns(getBidderSpec(3));
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: true,
                    enforceVendor: true,
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.gdprApplies = true;
        consentData.apiVersion = 2;
        gdprDataHandlerStub.returns(consentData);
        deviceAccessHook(nextFnSpy, 1, 'appnexus');
        deviceAccessHook(nextFnSpy, 3, 'rubicon');
        // staticConfig grants vendor consent to gvlid 1 but not 3,
        // so exactly one warning is expected.
        expect(logWarnSpy.callCount).to.equal(1);
    });

    it('should allow device access when gdprApplies is false and hasDeviceAccess flag is true', function () {
        adapterManagerStub.withArgs('appnexus').returns(getBidderSpec(1));
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: true,
                    enforceVendor: true,
                    vendorExceptions: []
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.gdprApplies = false;
        consentData.apiVersion = 2;
        gdprDataHandlerStub.returns(consentData);
        deviceAccessHook(nextFnSpy, 1, 'appnexus');
        expect(nextFnSpy.calledOnce).to.equal(true);
        let result = {
            hasEnforcementHook: true,
            valid: true
        }
        sinon.assert.calledWith(nextFnSpy, 1, 'appnexus', result);
    });

    it('should use gvlMapping set by publisher', function() {
        // Publisher-level gvlMapping overrides the gvlid passed to the hook.
        config.setConfig({
            'gvlMapping': {
                'appnexus': 4
            }
        });
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: true,
                    enforceVendor: true,
                    vendorExceptions: []
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.gdprApplies = true;
        consentData.apiVersion = 2;
        gdprDataHandlerStub.returns(consentData);
        deviceAccessHook(nextFnSpy, 1, 'appnexus');
        expect(nextFnSpy.calledOnce).to.equal(true);
        let result = {
            hasEnforcementHook: true,
            valid: true
        }
        sinon.assert.calledWith(nextFnSpy, 4, 'appnexus', result);
        config.resetConfig();
    });

    it('should use gvl id of alias and not of parent', function() {
        // Register an alias and map the *alias* (not the parent) to gvlid 4.
        let curBidderStub = sinon.stub(config, 'getCurrentBidder');
        curBidderStub.returns('appnexus-alias');
        adapterManager.aliasBidAdapter('appnexus', 'appnexus-alias');
        config.setConfig({
            'gvlMapping': {
                'appnexus-alias': 4
            }
        });
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: true,
                    enforceVendor: true,
                    vendorExceptions: []
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.gdprApplies = true;
        consentData.apiVersion = 2;
        gdprDataHandlerStub.returns(consentData);
        deviceAccessHook(nextFnSpy, 1, 'appnexus');
        expect(nextFnSpy.calledOnce).to.equal(true);
        let result = {
            hasEnforcementHook: true,
            valid: true
        }
        sinon.assert.calledWith(nextFnSpy, 4, 'appnexus', result);
        config.resetConfig();
        curBidderStub.restore();
    });
});
// Tests for the hook that gates bidder user-sync on storage consent.
describe('userSyncHook', function () {
    let curBidderStub;
    let adapterManagerStub;

    beforeEach(function () {
        gdprDataHandlerStub = sinon.stub(gdprDataHandler, 'getConsentData');
        logWarnSpy = sinon.spy(utils, 'logWarn');
        curBidderStub = sinon.stub(config, 'getCurrentBidder');
        adapterManagerStub = sinon.stub(adapterManager, 'getBidAdapter');
        nextFnSpy = sinon.spy();
    });
    afterEach(function () {
        config.getCurrentBidder.restore();
        config.resetConfig();
        gdprDataHandler.getConsentData.restore();
        adapterManager.getBidAdapter.restore();
        logWarnSpy.restore();
    });

    it('should allow bidder to do user sync if consent is true', function () {
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: false,
                    enforceVendor: true,
                    vendorExceptions: ['sampleBidder2']
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.gdprApplies = true;
        consentData.apiVersion = 2;
        gdprDataHandlerStub.returns(consentData);
        // sampleBidder1 (gvlid 1) has vendor consent in staticConfig.
        curBidderStub.returns('sampleBidder1');
        adapterManagerStub.withArgs('sampleBidder1').returns({
            getSpec: function () {
                return {
                    'gvlid': 1
                }
            }
        });
        userSyncHook(nextFnSpy);
        // sampleBidder2 (gvlid 3) lacks vendor consent but is an exception.
        curBidderStub.returns('sampleBidder2');
        adapterManagerStub.withArgs('sampleBidder2').returns({
            getSpec: function () {
                return {
                    'gvlid': 3
                }
            }
        });
        userSyncHook(nextFnSpy);
        // Both bidders proceed.
        expect(nextFnSpy.calledTwice).to.equal(true);
    });

    it('should not allow bidder to do user sync if user has denied consent', function () {
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: false,
                    enforceVendor: true,
                    vendorExceptions: []
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.apiVersion = 2;
        consentData.gdprApplies = true;
        gdprDataHandlerStub.returns(consentData);
        curBidderStub.returns('sampleBidder1');
        adapterManagerStub.withArgs('sampleBidder1').returns({
            getSpec: function () {
                return {
                    'gvlid': 1
                }
            }
        });
        userSyncHook(nextFnSpy);
        // gvlid 3 has no vendor consent and there is no exception this time.
        curBidderStub.returns('sampleBidder2');
        adapterManagerStub.withArgs('sampleBidder2').returns({
            getSpec: function () {
                return {
                    'gvlid': 3
                }
            }
        });
        userSyncHook(nextFnSpy);
        // Only the consented bidder proceeds; the other one is warned about.
        expect(nextFnSpy.calledOnce).to.equal(true);
        expect(logWarnSpy.callCount).to.equal(1);
    });

    it('should not check vendor consent when enforceVendor is false', function () {
        setEnforcementConfig({
            gdpr: {
                rules: [{
                    purpose: 'storage',
                    enforcePurpose: true,
                    enforceVendor: false,
                    vendorExceptions: ['sampleBidder1']
                }]
            }
        });
        let consentData = {}
        consentData.vendorData = staticConfig.consentData.getTCData;
        consentData.apiVersion = 2;
        consentData.gdprApplies = true;
        gdprDataHandlerStub.returns(consentData);
        curBidderStub.returns('sampleBidder1');
        adapterManagerStub.withArgs('sampleBidder1').returns({
            getSpec: function () {
                return {
                    'gvlid': 1
                }
            }
        });
        userSyncHook(nextFnSpy);
        curBidderStub.returns('sampleBidder2');
        adapterManagerStub.withArgs('sampleBidder2').returns({
            getSpec: function () {
                return {
                    'gvlid': 3
                }
            }
        });
        userSyncHook(nextFnSpy);
        // Purpose-1 consent is granted in staticConfig, so both proceed
        // and no warning is logged.
        expect(nextFnSpy.calledTwice).to.equal(true);
        expect(logWarnSpy.callCount).to.equal(0);
    });
});
// userIdHook: enforcement wrapper around the user-ID module initialisation.
// The hook receives the submodule list and consent data directly (no stubs
// for consent retrieval needed here) and is expected to filter out
// submodules whose vendor lacks consent.
describe('userIdHook', function () {
  beforeEach(function () {
    logWarnSpy = sinon.spy(utils, 'logWarn');
    nextFnSpy = sinon.spy();
  });
  afterEach(function () {
    config.resetConfig();
    logWarnSpy.restore();
  });
  it('should allow user id module if consent is given', function () {
    setEnforcementConfig({
      gdpr: {
        rules: [{
          purpose: 'storage',
          enforcePurpose: false,
          enforceVendor: true,
          vendorExceptions: []
        }]
      }
    });
    let consentData = {}
    consentData.vendorData = staticConfig.consentData.getTCData;
    consentData.apiVersion = 2;
    consentData.gdprApplies = true;
    // gvlid 1 has vendor consent in the static TCData fixture.
    let submodules = [{
      submodule: {
        gvlid: 1,
        name: 'sampleUserId'
      }
    }]
    userIdHook(nextFnSpy, submodules, consentData);
    // Should pass back hasValidated flag since version 2
    const args = nextFnSpy.getCalls()[0].args;
    expect(args[1].hasValidated).to.be.true;
    expect(nextFnSpy.calledOnce).to.equal(true);
    sinon.assert.calledWith(nextFnSpy, submodules, { ...consentData, hasValidated: true });
  });
  it('should allow userId module if gdpr not in scope', function () {
    let submodules = [{
      submodule: {
        gvlid: 1,
        name: 'sampleUserId'
      }
    }];
    // Null consent data means GDPR does not apply: the hook must pass
    // everything through untouched.
    let consentData = null;
    userIdHook(nextFnSpy, submodules, consentData);
    // Should not pass back hasValidated flag since version 2
    const args = nextFnSpy.getCalls()[0].args;
    expect(args[1]).to.be.null;
    expect(nextFnSpy.calledOnce).to.equal(true);
    sinon.assert.calledWith(nextFnSpy, submodules, consentData);
  });
  it('should not allow user id module if user denied consent', function () {
    setEnforcementConfig({
      gdpr: {
        rules: [{
          purpose: 'storage',
          enforcePurpose: false,
          enforceVendor: true,
          vendorExceptions: []
        }]
      }
    });
    let consentData = {}
    consentData.vendorData = staticConfig.consentData.getTCData;
    consentData.apiVersion = 2;
    consentData.gdprApplies = true;
    // gvlid 1 is consented, gvlid 3 is not: only the first submodule should
    // survive filtering, and one warning should be logged for the other.
    let submodules = [{
      submodule: {
        gvlid: 1,
        name: 'sampleUserId'
      }
    }, {
      submodule: {
        gvlid: 3,
        name: 'sampleUserId1'
      }
    }]
    userIdHook(nextFnSpy, submodules, consentData);
    expect(logWarnSpy.callCount).to.equal(1);
    let expectedSubmodules = [{
      submodule: {
        gvlid: 1,
        name: 'sampleUserId'
      }
    }]
    sinon.assert.calledWith(nextFnSpy, expectedSubmodules, { ...consentData, hasValidated: true });
  });
});
// makeBidRequestsHook: enforcement wrapper around adapterManager's bid
// request construction. It must drop bids from ad units when the bidder
// lacks Purpose-2 (basicAds) consent, unless legitimate-interest
// transparency is established or the bidder is in vendorExceptions.
describe('makeBidRequestsHook', function () {
  let sandbox;
  let adapterManagerStub;
  let emitEventSpy;
  const MOCK_AD_UNITS = [{
    code: 'ad-unit-1',
    mediaTypes: {},
    bids: [{
      bidder: 'bidder_1' // has consent
    }, {
      bidder: 'bidder_2' // doesn't have consent, but liTransparency is true. Bidder remains active.
    }]
  }, {
    code: 'ad-unit-2',
    mediaTypes: {},
    bids: [{
      bidder: 'bidder_2'
    }, {
      bidder: 'bidder_3'
    }]
  }];
  beforeEach(function () {
    // A sandbox restores all stubs/spies in one call in afterEach.
    sandbox = sinon.createSandbox();
    gdprDataHandlerStub = sandbox.stub(gdprDataHandler, 'getConsentData');
    adapterManagerStub = sandbox.stub(adapterManager, 'getBidAdapter');
    logWarnSpy = sandbox.spy(utils, 'logWarn');
    nextFnSpy = sandbox.spy();
    emitEventSpy = sandbox.spy(events, 'emit');
  });
  afterEach(function () {
    config.resetConfig();
    sandbox.restore();
  });
  it('should block bidder which does not have consent and allow bidder which has consent (liTransparency is established)', function () {
    setEnforcementConfig({
      gdpr: {
        rules: [{
          purpose: 'basicAds',
          enforcePurpose: true,
          enforceVendor: true,
          vendorExceptions: []
        }]
      }
    });
    const consentData = {};
    consentData.vendorData = staticConfig.consentData.getTCData;
    consentData.apiVersion = 2;
    consentData.gdprApplies = true;
    gdprDataHandlerStub.returns(consentData);
    adapterManagerStub.withArgs('bidder_1').returns({
      getSpec: function () {
        return { 'gvlid': 4 }
      }
    });
    adapterManagerStub.withArgs('bidder_2').returns({
      getSpec: function () {
        return { 'gvlid': 5 }
      }
    });
    adapterManagerStub.withArgs('bidder_3').returns({
      getSpec: function () {
        return { 'gvlid': undefined }
      }
    });
    makeBidRequestsHook(nextFnSpy, MOCK_AD_UNITS, []);
    // Assertions
    expect(nextFnSpy.calledOnce).to.equal(true);
    sinon.assert.calledWith(nextFnSpy, [{
      code: 'ad-unit-1',
      mediaTypes: {},
      bids: [
        sinon.match({ bidder: 'bidder_1' }),
        sinon.match({ bidder: 'bidder_2' })
      ]
    }, {
      code: 'ad-unit-2',
      mediaTypes: {},
      bids: [
        sinon.match({ bidder: 'bidder_2' }),
        sinon.match({ bidder: 'bidder_3' }) // should be allowed even though it's doesn't have a gvlId because liTransparency is established.
      ]
    }], []);
  });
  it('should block bidder which does not have consent and allow bidder which has consent (liTransparency is NOT established)', function() {
    setEnforcementConfig({
      gdpr: {
        rules: [{
          purpose: 'basicAds',
          enforcePurpose: true,
          enforceVendor: true,
          vendorExceptions: ['bidder_3']
        }]
      }
    });
    const consentData = {};
    // set li for purpose 2 to false
    const newConsentData = utils.deepClone(staticConfig);
    newConsentData.consentData.getTCData.purpose.legitimateInterests['2'] = false;
    consentData.vendorData = newConsentData.consentData.getTCData;
    consentData.apiVersion = 2;
    consentData.gdprApplies = true;
    gdprDataHandlerStub.returns(consentData);
    adapterManagerStub.withArgs('bidder_1').returns({
      getSpec: function () {
        return { 'gvlid': 4 }
      }
    });
    adapterManagerStub.withArgs('bidder_2').returns({
      getSpec: function () {
        return { 'gvlid': 5 }
      }
    });
    adapterManagerStub.withArgs('bidder_3').returns({
      getSpec: function () {
        return { 'gvlid': undefined }
      }
    });
    makeBidRequestsHook(nextFnSpy, MOCK_AD_UNITS, []);
    // Assertions
    expect(nextFnSpy.calledOnce).to.equal(true);
    sinon.assert.calledWith(nextFnSpy, [{
      code: 'ad-unit-1',
      mediaTypes: {},
      bids: [
        sinon.match({ bidder: 'bidder_1' }), // 'bidder_2' is not present because it doesn't have vendorConsent
      ]
    }, {
      code: 'ad-unit-2',
      mediaTypes: {},
      bids: [
        sinon.match({ bidder: 'bidder_3' }), // 'bidder_3' is allowed despite gvlId being undefined because it's part of vendorExceptions
      ]
    }], []);
    expect(logWarnSpy.calledOnce).to.equal(true);
  });
  it('should skip validation checks if GDPR version is not equal to "2"', function () {
    setEnforcementConfig({
      gdpr: {
        rules: [{
          purpose: 'storage',
          // Fixed typo: was "enforePurpose", which silently left
          // enforcePurpose at its default instead of disabling it.
          enforcePurpose: false,
          enforceVendor: false,
          vendorExceptions: []
        }]
      }
    });
    const consentData = {};
    consentData.vendorData = staticConfig.consentData.getTCData;
    // apiVersion 1 means TCF v1: enforcement must be skipped entirely.
    consentData.apiVersion = 1;
    consentData.gdprApplies = true;
    gdprDataHandlerStub.returns(consentData);
    makeBidRequestsHook(nextFnSpy, MOCK_AD_UNITS, []);
    // Assertions: ad units pass through untouched, nothing emitted or logged.
    expect(nextFnSpy.calledOnce).to.equal(true);
    sinon.assert.calledWith(nextFnSpy, sinon.match.array.deepEquals(MOCK_AD_UNITS), []);
    expect(emitEventSpy.notCalled).to.equal(true);
    expect(logWarnSpy.notCalled).to.equal(true);
  });
});
// enableAnalyticsHook: enforcement wrapper around enableAnalytics. Adapters
// whose GVL ID lacks purpose-7 (measurement) consent must be removed from
// the config list before the wrapped function is called.
describe('enableAnalyticsHook', function () {
  let sandbox;
  let adapterManagerStub;
  const MOCK_ANALYTICS_ADAPTER_CONFIG = [{
    provider: 'analyticsAdapter_A',
    options: {}
  }, {
    provider: 'analyticsAdapter_B',
    options: {}
  }, {
    provider: 'analyticsAdapter_C',
    options: {}
  }];
  beforeEach(function () {
    sandbox = sinon.createSandbox();
    gdprDataHandlerStub = sandbox.stub(gdprDataHandler, 'getConsentData');
    adapterManagerStub = sandbox.stub(adapterManager, 'getAnalyticsAdapter');
    logWarnSpy = sandbox.spy(utils, 'logWarn');
    nextFnSpy = sandbox.spy();
  });
  afterEach(function() {
    config.resetConfig();
    sandbox.restore();
  });
  it('should block analytics adapter which does not have consent and allow the one(s) which have consent', function() {
    setEnforcementConfig({
      gdpr: {
        rules: [{
          purpose: 'measurement',
          enforcePurpose: true,
          enforceVendor: true,
          vendorExceptions: ['analyticsAdapter_B']
        }]
      }
    });
    const consentData = {};
    consentData.vendorData = staticConfig.consentData.getTCData;
    consentData.apiVersion = 2;
    consentData.gdprApplies = true;
    gdprDataHandlerStub.returns(consentData);
    // A (gvlid 3) has no vendor consent -> blocked; B is in
    // vendorExceptions -> allowed regardless; C (gvlid 1) is consented.
    adapterManagerStub.withArgs('analyticsAdapter_A').returns({ gvlid: 3 });
    adapterManagerStub.withArgs('analyticsAdapter_B').returns({ gvlid: 5 });
    adapterManagerStub.withArgs('analyticsAdapter_C').returns({ gvlid: 1 });
    enableAnalyticsHook(nextFnSpy, MOCK_ANALYTICS_ADAPTER_CONFIG);
    // Assertions
    expect(nextFnSpy.calledOnce).to.equal(true);
    sinon.assert.calledWith(nextFnSpy, [{
      provider: 'analyticsAdapter_B',
      options: {}
    }, {
      provider: 'analyticsAdapter_C',
      options: {}
    }]);
    expect(logWarnSpy.calledOnce).to.equal(true);
  });
});
// validateRules: the core predicate deciding whether a module is allowed
// under a single enforcement rule. The four main tests enumerate the truth
// table of enforcePurpose x enforceVendor against the static TCData fixture
// (gvlid 1 = consented vendor, gvlid 3 = non-consented vendor).
describe('validateRules', function () {
  const createGdprRule = (purposeName = 'storage', enforcePurpose = true, enforceVendor = true, vendorExceptions = []) => ({
    purpose: purposeName,
    enforcePurpose: enforcePurpose,
    enforceVendor: enforceVendor,
    vendorExceptions: vendorExceptions
  });
  const consentData = {
    vendorData: staticConfig.consentData.getTCData,
    apiVersion: 2,
    gdprApplies: true
  };
  // Bidder - 'bidderA' has vendorConsent
  const vendorAllowedModule = 'bidderA';
  const vendorAllowedGvlId = 1;
  // Bidder = 'bidderB' doesn't have vendorConsent
  const vendorBlockedModule = 'bidderB';
  const vendorBlockedGvlId = 3;
  // Variant of the fixture with purpose-1 (storage) consent revoked.
  const consentDataWithPurposeConsentFalse = utils.deepClone(consentData);
  consentDataWithPurposeConsentFalse.vendorData.purpose.consents['1'] = false;
  it('should return true when enforcePurpose=true AND purposeConsent[p]==true AND enforceVendor[p,v]==true AND vendorConsent[v]==true', function () {
    // 'enforcePurpose' and 'enforceVendor' both are 'true'
    const gdprRule = createGdprRule('storage', true, true, []);
    // case 1 - Both purpose consent and vendor consent is 'true'. validateRules must return 'true'
    let isAllowed = validateRules(gdprRule, consentData, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(true);
    // case 2 - Purpose consent is 'true' but vendor consent is 'false'. validateRules must return 'false'
    isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(false);
    // case 3 - Purpose consent is 'false' but vendor consent is 'true'. validateRules must return 'false'
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(false);
    // case 4 - Both purpose consent and vendor consent is 'false'. validateRules must return 'false'
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(false);
  });
  it('should return true when enforcePurpose=true AND purposeConsent[p]==true AND enforceVendor[p,v]==false', function () {
    // 'enforcePurpose' is 'true' and 'enforceVendor' is 'false'
    const gdprRule = createGdprRule('storage', true, false, []);
    // case 1 - Both purpose consent and vendor consent is 'true'. validateRules must return 'true'
    let isAllowed = validateRules(gdprRule, consentData, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(true);
    // case 2 - Purpose consent is 'true' but vendor consent is 'false'. validateRules must return 'true' because vendorConsent doesn't matter
    isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(true);
    // case 3 - Purpose consent is 'false' but vendor consent is 'true'. validateRules must return 'false' because vendorConsent doesn't matter
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(false);
    // case 4 - Both purpose consent and vendor consent is 'false'. validateRules must return 'false' and vendorConsent doesn't matter
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorBlockedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(false);
  });
  it('should return true when enforcePurpose=false AND enforceVendor[p,v]==true AND vendorConsent[v]==true', function () {
    // 'enforcePurpose' is 'false' and 'enforceVendor' is 'true'
    const gdprRule = createGdprRule('storage', false, true, []);
    // case 1 - Both purpose consent and vendor consent is 'true'. validateRules must return 'true'
    let isAllowed = validateRules(gdprRule, consentData, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(true);
    // case 2 - Purpose consent is 'true' but vendor consent is 'false'. validateRules must return 'false' because purposeConsent doesn't matter
    isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(false);
    // case 3 - Purpose consent is 'false' but vendor consent is 'true'. validateRules must return 'true' because purposeConsent doesn't matter
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(true);
    // case 4 - Both purpose consent and vendor consent is 'false'. validateRules must return 'false' and purposeConsent doesn't matter
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(false);
  });
  it('should return true when enforcePurpose=false AND enforceVendor[p,v]==false', function () {
    // 'enforcePurpose' is 'false' and 'enforceVendor' is 'false'
    const gdprRule = createGdprRule('storage', false, false, []);
    // case 1 - Both purpose consent and vendor consent is 'true'. validateRules must return 'true', both the consents do not matter.
    let isAllowed = validateRules(gdprRule, consentData, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(true);
    // case 2 - Purpose consent is 'true' but vendor consent is 'false'. validateRules must return 'true', both the consents do not matter.
    isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(true);
    // case 3 - Purpose consent is 'false' but vendor consent is 'true'. validateRules must return 'true', both the consents do not matter.
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorAllowedModule, vendorAllowedGvlId);
    expect(isAllowed).to.equal(true);
    // case 4 - Both purpose consent and vendor consent is 'false'. validateRules must return 'true', both the consents do not matter.
    isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(true);
  });
  it('should return true when "vendorExceptions" contains the name of the vendor under test', function () {
    // 'vendorExceptions' contains 'bidderB' which doesn't have vendor consent.
    const gdprRule = createGdprRule('storage', false, true, [vendorBlockedModule]);
    /* 'bidderB' gets a free pass since it's included in the 'vendorExceptions' array. validateRules must disregard
    user's choice for purpose and vendor consent and return 'true' for this bidder(s) */
    const isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
    expect(isAllowed).to.equal(true);
  });
  describe('Purpose 2 special case', function () {
    // Purpose 2 (basicAds) has an extra escape hatch: legitimate-interest
    // transparency can stand in for explicit consent.
    const consentDataWithLIFalse = utils.deepClone(consentData);
    consentDataWithLIFalse.vendorData.purpose.legitimateInterests['2'] = false;
    // NOTE: intentionally shadows the outer consentDataWithPurposeConsentFalse,
    // here with purpose-2 (not purpose-1) consent revoked.
    const consentDataWithPurposeConsentFalse = utils.deepClone(consentData);
    consentDataWithPurposeConsentFalse.vendorData.purpose.consents['2'] = false;
    const consentDataWithPurposeConsentFalseAndLIFalse = utils.deepClone(consentData);
    consentDataWithPurposeConsentFalseAndLIFalse.vendorData.purpose.legitimateInterests['2'] = false;
    consentDataWithPurposeConsentFalseAndLIFalse.vendorData.purpose.consents['2'] = false;
    it('should return true when (enforcePurpose=true AND purposeConsent[p]===true AND enforceVendor[p.v]===true AND vendorConsent[v]===true) OR (purposesLITransparency[p]===true)', function () {
      // both 'enforcePurpose' and 'enforceVendor' is 'true'
      const gdprRule = createGdprRule('basicAds', true, true, []);
      // case 1 - Both purpose consent and vendor consent is 'true', but legitimateInterests for purpose 2 is 'false'. validateRules must return 'true'.
      let isAllowed = validateRules(gdprRule, consentDataWithLIFalse, vendorAllowedModule, vendorAllowedGvlId);
      expect(isAllowed).to.equal(true);
      // case 2 - Purpose consent is 'true' but vendor consent is 'false', but legitimateInterests for purpose 2 is 'true'. validateRules must return 'true'.
      isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
      expect(isAllowed).to.equal(true);
      // case 3 - Purpose consent is 'true' and vendor consent is 'true', as well as legitimateInterests for purpose 2 is 'true'. validateRules must return 'true'.
      isAllowed = validateRules(gdprRule, consentData, vendorAllowedModule, vendorAllowedGvlId);
      expect(isAllowed).to.equal(true);
      // case 4 - Purpose consent is 'true' and vendor consent is 'false', and legitimateInterests for purpose 2 is 'false'. validateRules must return 'false'.
      isAllowed = validateRules(gdprRule, consentDataWithLIFalse, vendorBlockedModule, vendorBlockedGvlId);
      expect(isAllowed).to.equal(false);
    });
    it('should return true when (enforcePurpose=true AND purposeConsent[p]===true AND enforceVendor[p.v]===false) OR (purposesLITransparency[p]===true)', function () {
      // 'enforcePurpose' is 'true' and 'enforceVendor' is 'false'
      const gdprRule = createGdprRule('basicAds', true, false, []);
      // case 1 - Purpose consent is 'true', vendor consent doesn't matter and legitimateInterests for purpose 2 is 'true'. validateRules must return 'true'.
      let isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
      expect(isAllowed).to.equal(true);
      // case 2 - Purpose consent is 'false', vendor consent doesn't matter and legitimateInterests for purpose 2 is 'true'. validateRules must return 'true'.
      isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalse, vendorAllowedModule, vendorAllowedGvlId);
      expect(isAllowed).to.equal(true);
      // case 3 - Purpose consent is 'false', vendor consent doesn't matter and legitimateInterests for purpose 2 is 'false'. validateRules must return 'false'.
      isAllowed = validateRules(gdprRule, consentDataWithPurposeConsentFalseAndLIFalse, vendorAllowedModule, vendorAllowedGvlId);
      expect(isAllowed).to.equal(false);
    });
    it('should return true when (enforcePurpose=false AND enforceVendor[p,v]===true AND vendorConsent[v]===true) OR (purposesLITransparency[p]===true)', function () {
      // 'enforcePurpose' is 'false' and 'enforceVendor' is 'true'
      const gdprRule = createGdprRule('basicAds', false, true, []);
      // case - 1 Vendor consent is 'true', purpose consent doesn't matter and legitimateInterests for purpose 2 is 'true'. validateRules must return 'true'.
      let isAllowed = validateRules(gdprRule, consentData, vendorAllowedModule, vendorAllowedGvlId);
      expect(isAllowed).to.equal(true);
      // case 2 - Vendor consent is 'false', purpose consent doesn't matter and legitimateInterests for purpose 2 is 'true'. validateRules must return 'true'.
      isAllowed = validateRules(gdprRule, consentData, vendorBlockedModule, vendorBlockedGvlId);
      expect(isAllowed).to.equal(true);
      // case 3 - Vendor consent is 'false', purpose consent doesn't matter and legitimateInterests for purpose 2 is 'false'. validateRules must return 'false'.
      isAllowed = validateRules(gdprRule, consentDataWithLIFalse, vendorBlockedModule, vendorBlockedGvlId);
      expect(isAllowed).to.equal(false);
    });
  });
})
// setEnforcementConfig: tests that the module fills in default rules for
// TCF2 Purpose 1 (storage) and Purpose 2 (basicAds) when the publisher's
// config omits one or both, and uses the publisher's rules verbatim when
// provided. enforcementRules / purpose1Rule / purpose2Rule are module-level
// state exported by the module under test.
describe('setEnforcementConfig', function () {
  let sandbox;
  const DEFAULT_RULES = [{
    purpose: 'storage',
    enforcePurpose: true,
    enforceVendor: true,
    vendorExceptions: []
  }, {
    purpose: 'basicAds',
    enforcePurpose: true,
    enforceVendor: true,
    vendorExceptions: []
  }];
  beforeEach(function () {
    sandbox = sinon.createSandbox();
    logWarnSpy = sandbox.spy(utils, 'logWarn');
  });
  afterEach(function () {
    config.resetConfig();
    sandbox.restore();
  });
  it('should enforce TCF2 Purpose1 and Purpose 2 if no "rules" found in the config', function () {
    setEnforcementConfig({
      gdpr: {
        cmpApi: 'iab',
        allowAuctionWithoutConsent: true,
        timeout: 5000
      }
    });
    // Missing "rules" triggers one warning and the built-in defaults.
    expect(logWarnSpy.calledOnce).to.equal(true);
    expect(enforcementRules).to.deep.equal(DEFAULT_RULES);
  });
  it('should enforce TCF2 Purpose 2 also if only Purpose 1 is defined in "rules"', function () {
    const purpose1RuleDefinedInConfig = {
      purpose: 'storage',
      enforcePurpose: false,
      enforceVendor: true,
      vendorExceptions: ['bidderA']
    }
    setEnforcementConfig({
      gdpr: {
        rules: [purpose1RuleDefinedInConfig]
      }
    });
    expect(purpose1Rule).to.deep.equal(purpose1RuleDefinedInConfig);
    expect(purpose2Rule).to.deep.equal(DEFAULT_RULES[1]);
  });
  it('should enforce TCF2 Purpose 1 also if only Purpose 2 is defined in "rules"', function () {
    const purpose2RuleDefinedInConfig = {
      purpose: 'basicAds',
      enforcePurpose: false,
      enforceVendor: true,
      vendorExceptions: ['bidderA']
    }
    setEnforcementConfig({
      gdpr: {
        rules: [purpose2RuleDefinedInConfig]
      }
    });
    expect(purpose1Rule).to.deep.equal(DEFAULT_RULES[0]);
    expect(purpose2Rule).to.deep.equal(purpose2RuleDefinedInConfig);
  });
  it('should use the "rules" defined in config if a definition found', function() {
    const rules = [{
      purpose: 'storage',
      enforcePurpose: false,
      enforceVendor: false
    }, {
      purpose: 'basicAds',
      enforcePurpose: false,
      enforceVendor: false
    }]
    setEnforcementConfig({gdpr: { rules }});
    expect(enforcementRules).to.deep.equal(rules);
  });
});
// TCF2FinalResults: the module should publish a 'tcf2Enforcement' summary
// event when the auction ends.
describe('TCF2FinalResults', function() {
  let sandbox;
  beforeEach(function() {
    sandbox = sinon.createSandbox();
    sandbox.spy(events, 'emit');
  });
  afterEach(function() {
    config.resetConfig();
    sandbox.restore();
  });
  it('should emit TCF2 enforcement data on auction end', function() {
    const rules = [{
      purpose: 'storage',
      enforcePurpose: false,
      enforceVendor: false
    }, {
      purpose: 'basicAds',
      enforcePurpose: false,
      enforceVendor: false
    }]
    setEnforcementConfig({gdpr: { rules }});
    events.emit('auctionEnd', {})
    // Assertions
    // getCall(1): call 0 is the 'auctionEnd' emit triggered above; the
    // enforcement event must be the very next emit.
    sinon.assert.calledWith(events.emit.getCall(1), 'tcf2Enforcement', sinon.match.object);
  })
});
// getGvlid: resolves a module name to its IAB Global Vendor List ID by
// consulting (in order) the publisher's gvlMapping config, then the bid
// adapter, userId submodule and analytics adapter registries. The internal
// lookup helpers are stubbed so each path can be tested in isolation.
describe('getGvlid', function() {
  let sandbox;
  let getGvlidForBidAdapterStub;
  let getGvlidForUserIdModuleStub;
  let getGvlidForAnalyticsAdapterStub;
  beforeEach(function() {
    sandbox = sinon.createSandbox();
    getGvlidForBidAdapterStub = sandbox.stub(internal, 'getGvlidForBidAdapter');
    getGvlidForUserIdModuleStub = sandbox.stub(internal, 'getGvlidForUserIdModule');
    getGvlidForAnalyticsAdapterStub = sandbox.stub(internal, 'getGvlidForAnalyticsAdapter');
  });
  afterEach(function() {
    sandbox.restore();
    config.resetConfig();
  });
  it('should return "null" if called without passing any argument', function() {
    const gvlid = getGvlid();
    expect(gvlid).to.equal(null);
  });
  it('should return "null" if GVL ID is not defined for any of these modules: Bid adapter, UserId submodule and Analytics adapter', function() {
    getGvlidForBidAdapterStub.withArgs('moduleA').returns(null);
    getGvlidForUserIdModuleStub.withArgs('moduleA').returns(null);
    getGvlidForAnalyticsAdapterStub.withArgs('moduleA').returns(null);
    const gvlid = getGvlid('moduleA');
    expect(gvlid).to.equal(null);
  });
  it('should return the GVL ID from gvlMapping if it is defined in setConfig', function() {
    // Publisher-provided mapping must win over the adapter's own gvlid.
    config.setConfig({
      gvlMapping: {
        moduleA: 1
      }
    });
    // Actual GVL ID for moduleA is 2, as defined on its the bidAdapter.js file.
    getGvlidForBidAdapterStub.withArgs('moduleA').returns(2);
    const gvlid = getGvlid('moduleA');
    expect(gvlid).to.equal(1);
  });
  it('should return the GVL ID by calling getGvlidForBidAdapter -> getGvlidForUserIdModule -> getGvlidForAnalyticsAdapter in sequence', function() {
    getGvlidForBidAdapterStub.withArgs('moduleA').returns(null);
    getGvlidForUserIdModuleStub.withArgs('moduleA').returns(null);
    getGvlidForAnalyticsAdapterStub.withArgs('moduleA').returns(7);
    expect(getGvlid('moduleA')).to.equal(7);
  });
});
});
|
// Umbrella header for the AYSegmentedControls framework
// (CocoaPods-generated style — regenerating the pod may overwrite edits).
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
// Compiled as plain C/C++: supply a fallback FOUNDATION_EXPORT so the
// version symbols below still declare correctly.
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
// Framework version symbols, defined by the build system.
FOUNDATION_EXPORT double AYSegmentedControlsVersionNumber;
FOUNDATION_EXPORT const unsigned char AYSegmentedControlsVersionString[];
|
/*
* Generated by util/mkerr.pl DO NOT EDIT
* Copyright 1995-2020 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the Apache License 2.0 (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/
#include <openssl/err.h>
#include <openssl/cryptoerr.h>
#include "crypto/cryptoerr.h"
#ifndef OPENSSL_NO_ERR
/*
 * Maps each CRYPTO_R_* reason code (packed with ERR_PACK) to its
 * human-readable string. Generated by util/mkerr.pl — do not edit by hand;
 * regenerate instead. Terminated by the {0, NULL} sentinel.
 */
static const ERR_STRING_DATA CRYPTO_str_reasons[] = {
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_BAD_ALGORITHM_NAME),
    "bad algorithm name"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_CONFLICTING_NAMES),
    "conflicting names"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_FIPS_MODE_NOT_SUPPORTED),
    "fips mode not supported"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_HEX_STRING_TOO_SHORT),
    "hex string too short"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_ILLEGAL_HEX_DIGIT),
    "illegal hex digit"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_INSUFFICIENT_DATA_SPACE),
    "insufficient data space"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_INSUFFICIENT_PARAM_SIZE),
    "insufficient param size"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_INSUFFICIENT_SECURE_DATA_SPACE),
    "insufficient secure data space"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_INVALID_NULL_ARGUMENT),
    "invalid null argument"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_INVALID_OSSL_PARAM_TYPE),
    "invalid ossl param type"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_ODD_NUMBER_OF_DIGITS),
    "odd number of digits"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_PROVIDER_ALREADY_EXISTS),
    "provider already exists"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_PROVIDER_SECTION_ERROR),
    "provider section error"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_RANDOM_SECTION_ERROR),
    "random section error"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_SECURE_MALLOC_FAILURE),
    "secure malloc failure"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_STRING_TOO_LONG), "string too long"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_TOO_MANY_BYTES), "too many bytes"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_TOO_MANY_RECORDS),
    "too many records"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_TOO_SMALL_BUFFER),
    "too small buffer"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_UNKNOWN_NAME_IN_RANDOM_SECTION),
    "unknown name in random section"},
    {ERR_PACK(ERR_LIB_CRYPTO, 0, CRYPTO_R_ZERO_LENGTH_NUMBER),
    "zero length number"},
    {0, NULL}
};
#endif
/*
 * Register the CRYPTO library's reason strings with the ERR machinery.
 * The ERR_reason_error_string() probe makes the call idempotent: strings
 * are only loaded if the first entry is not already registered.
 * Always returns 1 (generated mkerr.pl convention).
 */
int err_load_CRYPTO_strings_int(void)
{
#ifndef OPENSSL_NO_ERR
    if (ERR_reason_error_string(CRYPTO_str_reasons[0].error) == NULL)
        ERR_load_strings_const(CRYPTO_str_reasons);
#endif
    return 1;
}
|
# 4-3 Counting to Twenty
# Print the integers 1 through 20, one per line (same output as a
# range loop with print per iteration).
print("\n".join(str(number) for number in range(1, 21)))
|
// Generated/minified Ace editor theme module ("github" theme). Defines the
// theme metadata and CSS, then eagerly requires it for CommonJS consumers.
// Do not hand-edit the minified payload; regenerate from the ace-builds
// distribution instead.
ace.define("ace/theme/github",["require","exports","module","ace/lib/dom"],function(e,t,n){t.isDark=!1,t.cssClass="ace-builds-github",t.cssText='.ace-builds-github .ace_gutter {background: #e8e8e8;color: #AAA;}.ace-builds-github {background: #fff;color: #000;}.ace-builds-github .ace_keyword {font-weight: bold;}.ace-builds-github .ace_string {color: #D14;}.ace-builds-github .ace_variable.ace_class {color: teal;}.ace-builds-github .ace_constant.ace_numeric {color: #099;}.ace-builds-github .ace_constant.ace_buildin {color: #0086B3;}.ace-builds-github .ace_support.ace_function {color: #0086B3;}.ace-builds-github .ace_comment {color: #998;font-style: italic;}.ace-builds-github .ace_variable.ace_language {color: #0086B3;}.ace-builds-github .ace_paren {font-weight: bold;}.ace-builds-github .ace_boolean {font-weight: bold;}.ace-builds-github .ace_string.ace_regexp {color: #009926;font-weight: normal;}.ace-builds-github .ace_variable.ace_instance {color: teal;}.ace-builds-github .ace_constant.ace_language {font-weight: bold;}.ace-builds-github .ace_cursor {color: black;}.ace-builds-github.ace_focus .ace_marker-layer .ace_active-line {background: rgb(255, 255, 204);}.ace-builds-github .ace_marker-layer .ace_active-line {background: rgb(245, 245, 245);}.ace-builds-github .ace_marker-layer .ace_selection {background: rgb(181, 213, 255);}.ace-builds-github.ace_multiselect .ace_selection.ace_start {box-shadow: 0 0 3px 0px white;}.ace-builds-github.ace_nobold .ace_line > span {font-weight: normal !important;}.ace-builds-github .ace_marker-layer .ace_step {background: rgb(252, 255, 0);}.ace-builds-github .ace_marker-layer .ace_stack {background: rgb(164, 229, 101);}.ace-builds-github .ace_marker-layer .ace_bracket {margin: -1px 0 0 -1px;border: 1px solid rgb(192, 192, 192);}.ace-builds-github .ace_gutter-active-line {background-color : rgba(0, 0, 0, 0.07);}.ace-builds-github .ace_marker-layer .ace_selected-word {background: rgb(250, 250, 255);border: 1px solid rgb(200, 200, 
250);}.ace-builds-github .ace_invisible {color: #BFBFBF}.ace-builds-github .ace_print-margin {width: 1px;background: #e8e8e8;}.ace-builds-github .ace_indent-guide {background: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bLly//BwAmVgd1/w11/gAAAABJRU5ErkJggg==") right repeat-y;}';var r=e("../lib/dom");r.importCssString(t.cssText,t.cssClass)}); (function() {
ace.require(["ace/theme/github"], function(m) {
if (typeof module == "object" && typeof exports == "object" && module) {
module.exports = m;
}
});
})();
|
// Drupal admin-theme behaviors: (1) apply Select2 to admin form selects with
// accessibility fix-ups, (2) redirect the Block Layout UI links for
// site managers to the Custom Blocks library.
(function($, Drupal) {
  /* SELECT2
  ----------------------- */
  Drupal.behaviors.advancedSelect = {
    attach: function (context, settings) {
      // .once('selects') guards against re-attaching on AJAX re-renders.
      // NOTE(review): the inner select2() call targets a global form
      // selector rather than `this`, so it re-runs per matched select —
      // presumably intentional (idempotent), but confirm.
      $("select", context).once('selects').each(function(){
        $( 'form:not(.entity-embed-dialog):not(.entity-form-display-form):not(.entity-view-display-form):not(.layout-builder-add-block):not(.layout-builder-update-block) select' ).select2({
          placeholder: "Select an option"
        });
        // Give each Select2 search input an aria-label taken from the
        // original field's label, and drop the role attribute.
        $(".path-node .js-form-type-select", context).once('selectAccessiblity').each(function(){
          $(document).ready(function(){
            $('.select2-search__field').each(function(){
              var label = $(this).closest('.select2-container').siblings('label').text();
              $(this).attr('aria-label',label).removeAttr('role');
            });
          });
        });
      });
    }
  };
  /* BLOCK LAYOUT PATH CHANGE
  ----------------------- */
  Drupal.behaviors.blockLink = {
    attach: function (context, settings) {
      // For site managers (who are not administrators), rewrite the core
      // Block Layout link to the custom block library and prune the tabs
      // they should not see.
      $('.role-site_manager:not(.role-administrator) a[href="/admin/structure/block"]', context).once('changeBlockUIPath').each(function(){
        $(this).attr('href','/admin/structure/block/block-content').text('Custom Blocks');
        if($(this).closest('li.tabs__tab').length){
          $(this).closest('li.tabs__tab').remove();
        }
        $('a[href="/admin/structure/block/block-content/types"]').closest('li.tabs__tab').remove();
        $('.admin-list a[href="/admin/structure/block/block-content"]').wrapInner('<span class="label">').append('<div class="description">Manage blocks in the custom block library.</div>');
      });
    }
  };
})(jQuery, Drupal);
|
/*
============================================================================================
Big include file for all the distinct FEM basis function classes.
NOTE: portions of this code are automatically generated!
Copyright (c) 02-17-2011, Shawn W. Walker
============================================================================================
*/
#include "Abstract_FEM_Function.cc"
#include "basis_function_computations.h"
/*------------ BEGIN: Auto Generate ------------*/
#include "Data_Type_CONST_ONE_phi.cc"
#include "Data_Type_M_Space_phi_restricted_to_Gamma.cc"
#include "Data_Type_U_Space_phi_restricted_to_Gamma.cc"
#include "Data_Type_Vector_P1_phi_restricted_to_Gamma.cc"
/*------------ END: Auto Generate ------------*/
/***/
|
# Copyright (C) 2003 Python Software Foundation
import unittest
import shutil
import tempfile
import sys
import stat
import os
import os.path
import errno
import functools
import subprocess
from test import support
from test.support import TESTFN
from os.path import splitdrive
from distutils.spawn import find_executable, spawn
from shutil import (_make_tarball, _make_zipfile, make_archive,
register_archive_format, unregister_archive_format,
get_archive_formats, Error, unpack_archive,
register_unpack_format, RegistryError,
unregister_unpack_format, get_unpack_formats,
SameFileError)
import tarfile
import warnings
from test import support
from test.support import TESTFN, check_warnings, captured_stdout, requires_zlib
try:
import bz2
BZ2_SUPPORTED = True
except ImportError:
BZ2_SUPPORTED = False
TESTFN2 = TESTFN + "2"
try:
import grp
import pwd
UID_GID_SUPPORT = True
except ImportError:
UID_GID_SUPPORT = False
try:
import zipfile
ZIP_SUPPORT = True
except ImportError:
ZIP_SUPPORT = find_executable('zip')
def _fake_rename(*args, **kwargs):
# Pretend the destination path is on a different filesystem.
raise OSError(getattr(errno, 'EXDEV', 18), "Invalid cross-device link")
def mock_rename(func):
    """Decorator: run *func* with os.rename replaced by _fake_rename.

    The real os.rename is restored afterwards even if *func* raises.
    """
    @functools.wraps(func)
    def wrap(*args, **kwargs):
        saved_rename = os.rename
        os.rename = _fake_rename
        try:
            return func(*args, **kwargs)
        finally:
            os.rename = saved_rename
    return wrap
def write_file(path, content, binary=False):
    """Write *content* to the file at *path*.

    *path* may be a tuple of segments, which are joined with
    os.path.join.  The file is opened in binary mode when *binary* is
    true, text mode otherwise.
    """
    target = os.path.join(*path) if isinstance(path, tuple) else path
    mode = 'wb' if binary else 'w'
    with open(target, mode) as stream:
        stream.write(content)
def read_file(path, binary=False):
    """Return the contents of the file at *path*.

    *path* may be a tuple of segments, which are joined with
    os.path.join.  The file is read in binary mode when *binary* is
    true, text mode otherwise.
    """
    target = os.path.join(*path) if isinstance(path, tuple) else path
    mode = 'rb' if binary else 'r'
    with open(target, mode) as stream:
        return stream.read()
class TestShutil(unittest.TestCase):
    def setUp(self):
        """Initialise the list of temp directories to clean up in tearDown."""
        super(TestShutil, self).setUp()
        self.tempdirs = []
    def tearDown(self):
        """Remove every directory created via self.mkdtemp().

        On Windows/Cygwin the second argument (ignore_errors) is True,
        so removal errors there are silently ignored.
        """
        super(TestShutil, self).tearDown()
        while self.tempdirs:
            d = self.tempdirs.pop()
            shutil.rmtree(d, os.name in ('nt', 'cygwin'))
    def mkdtemp(self):
        """Create a temporary directory that will be cleaned up.

        The directory is registered in self.tempdirs so tearDown removes it.
        Returns the path of the directory.
        """
        d = tempfile.mkdtemp()
        self.tempdirs.append(d)
        return d
    def test_rmtree_works_on_bytes(self):
        """rmtree() accepts a bytes path as well as str."""
        tmp = self.mkdtemp()
        victim = os.path.join(tmp, 'killme')
        os.mkdir(victim)
        write_file(os.path.join(victim, 'somefile'), 'foo')
        victim = os.fsencode(victim)
        self.assertIsInstance(victim, bytes)
        shutil.rmtree(victim)
    @support.skip_unless_symlink
    def test_rmtree_fails_on_symlink(self):
        """rmtree() refuses a symlink to a directory and leaves both intact."""
        tmp = self.mkdtemp()
        dir_ = os.path.join(tmp, 'dir')
        os.mkdir(dir_)
        link = os.path.join(tmp, 'link')
        os.symlink(dir_, link)
        self.assertRaises(OSError, shutil.rmtree, link)
        # Neither the target directory nor the link may have been removed.
        self.assertTrue(os.path.exists(dir_))
        self.assertTrue(os.path.lexists(link))
        errors = []
        def onerror(*args):
            errors.append(args)
        # With an onerror handler, the failure is reported instead of raised.
        shutil.rmtree(link, onerror=onerror)
        self.assertEqual(len(errors), 1)
        self.assertIs(errors[0][0], os.path.islink)
        self.assertEqual(errors[0][1], link)
        self.assertIsInstance(errors[0][2][1], OSError)
    @support.skip_unless_symlink
    def test_rmtree_works_on_symlinks(self):
        """rmtree() removes symlinks inside the tree without following them."""
        tmp = self.mkdtemp()
        dir1 = os.path.join(tmp, 'dir1')
        dir2 = os.path.join(dir1, 'dir2')
        dir3 = os.path.join(tmp, 'dir3')
        for d in dir1, dir2, dir3:
            os.mkdir(d)
        file1 = os.path.join(tmp, 'file1')
        write_file(file1, 'foo')
        link1 = os.path.join(dir1, 'link1')
        os.symlink(dir2, link1)
        link2 = os.path.join(dir1, 'link2')
        os.symlink(dir3, link2)
        link3 = os.path.join(dir1, 'link3')
        os.symlink(file1, link3)
        # make sure symlinks are removed but not followed
        shutil.rmtree(dir1)
        self.assertFalse(os.path.exists(dir1))
        # Link targets outside dir1 must survive.
        self.assertTrue(os.path.exists(dir3))
        self.assertTrue(os.path.exists(file1))
    def test_rmtree_errors(self):
        """rmtree() error behaviour: missing path, plain file, onerror calls."""
        # filename is guaranteed not to exist
        filename = tempfile.mktemp()
        self.assertRaises(FileNotFoundError, shutil.rmtree, filename)
        # test that ignore_errors option is honored
        shutil.rmtree(filename, ignore_errors=True)
        # existing file
        tmpdir = self.mkdtemp()
        write_file((tmpdir, "tstfile"), "")
        filename = os.path.join(tmpdir, "tstfile")
        with self.assertRaises(NotADirectoryError) as cm:
            shutil.rmtree(filename)
        # The reason for this rather odd construct is that Windows sprinkles
        # a \*.* at the end of file names. But only sometimes on some buildbots
        possible_args = [filename, os.path.join(filename, '*.*')]
        self.assertIn(cm.exception.filename, possible_args)
        self.assertTrue(os.path.exists(filename))
        # test that ignore_errors option is honored
        shutil.rmtree(filename, ignore_errors=True)
        self.assertTrue(os.path.exists(filename))
        errors = []
        def onerror(*args):
            errors.append(args)
        shutil.rmtree(filename, onerror=onerror)
        # onerror is called once for the failed listdir and once for rmdir.
        self.assertEqual(len(errors), 2)
        self.assertIs(errors[0][0], os.listdir)
        self.assertEqual(errors[0][1], filename)
        self.assertIsInstance(errors[0][2][1], NotADirectoryError)
        self.assertIn(errors[0][2][1].filename, possible_args)
        self.assertIs(errors[1][0], os.rmdir)
        self.assertEqual(errors[1][1], filename)
        self.assertIsInstance(errors[1][2][1], NotADirectoryError)
        self.assertIn(errors[1][2][1].filename, possible_args)
    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod()')
    @unittest.skipIf(sys.platform[:6] == 'cygwin',
                     "This test can't be run on Cygwin (issue #1071513).")
    @unittest.skipIf(hasattr(os, 'geteuid') and os.geteuid() == 0,
                     "This test can't be run reliably as root (issue #1076467).")
    def test_on_error(self):
        """rmtree() calls onerror for every failure in an unwritable tree."""
        self.errorState = 0
        os.mkdir(TESTFN)
        self.addCleanup(shutil.rmtree, TESTFN)
        self.child_file_path = os.path.join(TESTFN, 'a')
        self.child_dir_path = os.path.join(TESTFN, 'b')
        support.create_empty_file(self.child_file_path)
        os.mkdir(self.child_dir_path)
        old_dir_mode = os.stat(TESTFN).st_mode
        old_child_file_mode = os.stat(self.child_file_path).st_mode
        old_child_dir_mode = os.stat(self.child_dir_path).st_mode
        # Make unwritable.
        new_mode = stat.S_IREAD|stat.S_IEXEC
        os.chmod(self.child_file_path, new_mode)
        os.chmod(self.child_dir_path, new_mode)
        os.chmod(TESTFN, new_mode)
        # Restore permissions so cleanup can actually delete the tree.
        self.addCleanup(os.chmod, TESTFN, old_dir_mode)
        self.addCleanup(os.chmod, self.child_file_path, old_child_file_mode)
        self.addCleanup(os.chmod, self.child_dir_path, old_child_dir_mode)
        shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
        # Test whether onerror has actually been called.
        self.assertEqual(self.errorState, 3,
                         "Expected call to onerror function did not happen.")
    def check_args_to_onerror(self, func, arg, exc):
        """onerror callback for test_on_error; validates rmtree's arguments.

        Advances self.errorState as each expected failure is observed.
        """
        # test_rmtree_errors deliberately runs rmtree
        # on a directory that is chmod 500, which will fail.
        # This function is run when shutil.rmtree fails.
        # 99.9% of the time it initially fails to remove
        # a file in the directory, so the first time through
        # func is os.remove.
        # However, some Linux machines running ZFS on
        # FUSE experienced a failure earlier in the process
        # at os.listdir. The first failure may legally
        # be either.
        if self.errorState < 2:
            if func is os.unlink:
                self.assertEqual(arg, self.child_file_path)
            elif func is os.rmdir:
                self.assertEqual(arg, self.child_dir_path)
            else:
                self.assertIs(func, os.listdir)
                self.assertIn(arg, [TESTFN, self.child_dir_path])
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState += 1
        else:
            # Final failure: removing the top-level directory itself.
            self.assertEqual(func, os.rmdir)
            self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 3
    def test_rmtree_does_not_choke_on_failing_lstat(self):
        """rmtree() tolerates os.lstat failing on entries inside the tree."""
        try:
            orig_lstat = os.lstat
            # Only the top-level path stats successfully.
            def raiser(fn, *args, **kwargs):
                if fn != TESTFN:
                    raise OSError()
                else:
                    return orig_lstat(fn)
            os.lstat = raiser
            os.mkdir(TESTFN)
            write_file((TESTFN, 'foo'), 'foo')
            shutil.rmtree(TESTFN)
        finally:
            os.lstat = orig_lstat
    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    @support.skip_unless_symlink
    def test_copymode_follow_symlinks(self):
        """copymode() follows symlinks by default on both sides."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        src_link = os.path.join(tmp_dir, 'baz')
        dst_link = os.path.join(tmp_dir, 'quux')
        write_file(src, 'foo')
        write_file(dst, 'foo')
        os.symlink(src, src_link)
        os.symlink(dst, dst_link)
        os.chmod(src, stat.S_IRWXU|stat.S_IRWXG)
        # file to file
        os.chmod(dst, stat.S_IRWXO)
        self.assertNotEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
        shutil.copymode(src, dst)
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
        # follow src link
        os.chmod(dst, stat.S_IRWXO)
        shutil.copymode(src_link, dst)
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
        # follow dst link
        os.chmod(dst, stat.S_IRWXO)
        shutil.copymode(src, dst_link)
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
        # follow both links
        os.chmod(dst, stat.S_IRWXO)
        shutil.copymode(src_link, dst)
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
    @unittest.skipUnless(hasattr(os, 'lchmod'), 'requires os.lchmod')
    @support.skip_unless_symlink
    def test_copymode_symlink_to_symlink(self):
        """copymode(follow_symlinks=False) copies the link's own mode."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        src_link = os.path.join(tmp_dir, 'baz')
        dst_link = os.path.join(tmp_dir, 'quux')
        write_file(src, 'foo')
        write_file(dst, 'foo')
        os.symlink(src, src_link)
        os.symlink(dst, dst_link)
        os.chmod(src, stat.S_IRWXU|stat.S_IRWXG)
        os.chmod(dst, stat.S_IRWXU)
        os.lchmod(src_link, stat.S_IRWXO|stat.S_IRWXG)
        # link to link
        os.lchmod(dst_link, stat.S_IRWXO)
        shutil.copymode(src_link, dst_link, follow_symlinks=False)
        self.assertEqual(os.lstat(src_link).st_mode,
                         os.lstat(dst_link).st_mode)
        # The link targets must be untouched.
        self.assertNotEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
        # src link - use chmod
        os.lchmod(dst_link, stat.S_IRWXO)
        shutil.copymode(src_link, dst, follow_symlinks=False)
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
        # dst link - use chmod
        os.lchmod(dst_link, stat.S_IRWXO)
        shutil.copymode(src, dst_link, follow_symlinks=False)
        self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode)
    @unittest.skipIf(hasattr(os, 'lchmod'), 'requires os.lchmod to be missing')
    @support.skip_unless_symlink
    def test_copymode_symlink_to_symlink_wo_lchmod(self):
        """Without os.lchmod, copymode on two links is a silent no-op."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        src_link = os.path.join(tmp_dir, 'baz')
        dst_link = os.path.join(tmp_dir, 'quux')
        write_file(src, 'foo')
        write_file(dst, 'foo')
        os.symlink(src, src_link)
        os.symlink(dst, dst_link)
        shutil.copymode(src_link, dst_link, follow_symlinks=False)  # silent fail
    @support.skip_unless_symlink
    def test_copystat_symlinks(self):
        """copystat() with and without follow_symlinks on symlink pairs."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        src_link = os.path.join(tmp_dir, 'baz')
        dst_link = os.path.join(tmp_dir, 'qux')
        write_file(src, 'foo')
        src_stat = os.stat(src)
        os.utime(src, (src_stat.st_atime,
                       src_stat.st_mtime - 42.0))  # ensure different mtimes
        write_file(dst, 'bar')
        self.assertNotEqual(os.stat(src).st_mtime, os.stat(dst).st_mtime)
        os.symlink(src, src_link)
        os.symlink(dst, dst_link)
        if hasattr(os, 'lchmod'):
            os.lchmod(src_link, stat.S_IRWXO)
        if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'):
            os.lchflags(src_link, stat.UF_NODUMP)
        src_link_stat = os.lstat(src_link)
        # follow
        if hasattr(os, 'lchmod'):
            shutil.copystat(src_link, dst_link, follow_symlinks=True)
            self.assertNotEqual(src_link_stat.st_mode, os.stat(dst).st_mode)
        # don't follow
        shutil.copystat(src_link, dst_link, follow_symlinks=False)
        dst_link_stat = os.lstat(dst_link)
        if os.utime in os.supports_follow_symlinks:
            for attr in 'st_atime', 'st_mtime':
                # The modification times may be truncated in the new file.
                self.assertLessEqual(getattr(src_link_stat, attr),
                                     getattr(dst_link_stat, attr) + 1)
        if hasattr(os, 'lchmod'):
            self.assertEqual(src_link_stat.st_mode, dst_link_stat.st_mode)
        if hasattr(os, 'lchflags') and hasattr(src_link_stat, 'st_flags'):
            self.assertEqual(src_link_stat.st_flags, dst_link_stat.st_flags)
        # tell to follow but dst is not a link
        # NOTE(review): the odd literal 00000.1 below is just 0.1 -- a
        # 100 ms tolerance on the copied mtime.
        shutil.copystat(src_link, dst, follow_symlinks=False)
        self.assertTrue(abs(os.stat(src).st_mtime - os.stat(dst).st_mtime) <
                        00000.1)
    @unittest.skipUnless(hasattr(os, 'chflags') and
                         hasattr(errno, 'EOPNOTSUPP') and
                         hasattr(errno, 'ENOTSUP'),
                         "requires os.chflags, EOPNOTSUPP & ENOTSUP")
    def test_copystat_handles_harmless_chflags_errors(self):
        """copystat() swallows EOPNOTSUPP/ENOTSUP from chflags, not others."""
        tmpdir = self.mkdtemp()
        file1 = os.path.join(tmpdir, 'file1')
        file2 = os.path.join(tmpdir, 'file2')
        write_file(file1, 'xxx')
        write_file(file2, 'xxx')
        def make_chflags_raiser(err):
            ex = OSError()
            def _chflags_raiser(path, flags, *, follow_symlinks=True):
                ex.errno = err
                raise ex
            return _chflags_raiser
        old_chflags = os.chflags
        try:
            for err in errno.EOPNOTSUPP, errno.ENOTSUP:
                os.chflags = make_chflags_raiser(err)
                shutil.copystat(file1, file2)
            # assert others errors break it
            # (the sum is just an errno value that is neither constant)
            os.chflags = make_chflags_raiser(errno.EOPNOTSUPP + errno.ENOTSUP)
            self.assertRaises(OSError, shutil.copystat, file1, file2)
        finally:
            os.chflags = old_chflags
    @support.skip_unless_xattr
    def test_copyxattr(self):
        """_copyxattr() copies extended attributes; per-attr errors and an
        xattr-less source filesystem must not abort the copy."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        write_file(src, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        write_file(dst, 'bar')
        # no xattr == no problem
        shutil._copyxattr(src, dst)
        # common case
        os.setxattr(src, 'user.foo', b'42')
        os.setxattr(src, 'user.bar', b'43')
        shutil._copyxattr(src, dst)
        self.assertEqual(os.listxattr(src), os.listxattr(dst))
        self.assertEqual(
            os.getxattr(src, 'user.foo'),
            os.getxattr(dst, 'user.foo'))
        # check errors don't affect other attrs
        os.remove(dst)
        write_file(dst, 'bar')
        os_error = OSError(errno.EPERM, 'EPERM')
        def _raise_on_user_foo(fname, attr, val, **kwargs):
            if attr == 'user.foo':
                raise os_error
            else:
                orig_setxattr(fname, attr, val, **kwargs)
        try:
            orig_setxattr = os.setxattr
            os.setxattr = _raise_on_user_foo
            shutil._copyxattr(src, dst)
            # user.bar must still have been copied despite user.foo failing.
            self.assertIn('user.bar', os.listxattr(dst))
        finally:
            os.setxattr = orig_setxattr
        # the source filesystem not supporting xattrs should be ok, too.
        def _raise_on_src(fname, *, follow_symlinks=True):
            if fname == src:
                raise OSError(errno.ENOTSUP, 'Operation not supported')
            return orig_listxattr(fname, follow_symlinks=follow_symlinks)
        try:
            orig_listxattr = os.listxattr
            os.listxattr = _raise_on_src
            shutil._copyxattr(src, dst)
        finally:
            os.listxattr = orig_listxattr
        # test that shutil.copystat copies xattrs
        src = os.path.join(tmp_dir, 'the_original')
        write_file(src, src)
        os.setxattr(src, 'user.the_value', b'fiddly')
        dst = os.path.join(tmp_dir, 'the_copy')
        write_file(dst, dst)
        shutil.copystat(src, dst)
        self.assertEqual(os.getxattr(dst, 'user.the_value'), b'fiddly')
    @support.skip_unless_symlink
    @support.skip_unless_xattr
    @unittest.skipUnless(hasattr(os, 'geteuid') and os.geteuid() == 0,
                         'root privileges required')
    def test_copyxattr_symlinks(self):
        """_copyxattr(follow_symlinks=False) copies the link's own xattrs."""
        # On Linux, it's only possible to access non-user xattr for symlinks;
        # which in turn require root privileges. This test should be expanded
        # as soon as other platforms gain support for extended attributes.
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        src_link = os.path.join(tmp_dir, 'baz')
        write_file(src, 'foo')
        os.symlink(src, src_link)
        os.setxattr(src, 'trusted.foo', b'42')
        os.setxattr(src_link, 'trusted.foo', b'43', follow_symlinks=False)
        dst = os.path.join(tmp_dir, 'bar')
        dst_link = os.path.join(tmp_dir, 'qux')
        write_file(dst, 'bar')
        os.symlink(dst, dst_link)
        shutil._copyxattr(src_link, dst_link, follow_symlinks=False)
        self.assertEqual(os.getxattr(dst_link, 'trusted.foo', follow_symlinks=False), b'43')
        # The link target must not have received the attribute.
        self.assertRaises(OSError, os.getxattr, dst, 'trusted.foo')
        shutil._copyxattr(src_link, dst, follow_symlinks=False)
        self.assertEqual(os.getxattr(dst, 'trusted.foo'), b'43')
    @support.skip_unless_symlink
    def test_copy_symlinks(self):
        """copy() follows or preserves a source symlink per follow_symlinks."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        src_link = os.path.join(tmp_dir, 'baz')
        write_file(src, 'foo')
        os.symlink(src, src_link)
        if hasattr(os, 'lchmod'):
            os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO)
        # follow the link: dst becomes a regular-file copy of src
        # (the original comment here said "don't follow", which was backwards)
        shutil.copy(src_link, dst, follow_symlinks=True)
        self.assertFalse(os.path.islink(dst))
        self.assertEqual(read_file(src), read_file(dst))
        os.remove(dst)
        # don't follow: dst becomes a symlink to the same target
        shutil.copy(src_link, dst, follow_symlinks=False)
        self.assertTrue(os.path.islink(dst))
        self.assertEqual(os.readlink(dst), os.readlink(src_link))
        if hasattr(os, 'lchmod'):
            self.assertEqual(os.lstat(src_link).st_mode,
                             os.lstat(dst).st_mode)
    @support.skip_unless_symlink
    def test_copy2_symlinks(self):
        """copy2() preserves link metadata when follow_symlinks=False."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        src_link = os.path.join(tmp_dir, 'baz')
        write_file(src, 'foo')
        os.symlink(src, src_link)
        if hasattr(os, 'lchmod'):
            os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO)
        if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'):
            os.lchflags(src_link, stat.UF_NODUMP)
        src_stat = os.stat(src)
        src_link_stat = os.lstat(src_link)
        # follow
        shutil.copy2(src_link, dst, follow_symlinks=True)
        self.assertFalse(os.path.islink(dst))
        self.assertEqual(read_file(src), read_file(dst))
        os.remove(dst)
        # don't follow
        shutil.copy2(src_link, dst, follow_symlinks=False)
        self.assertTrue(os.path.islink(dst))
        self.assertEqual(os.readlink(dst), os.readlink(src_link))
        dst_stat = os.lstat(dst)
        if os.utime in os.supports_follow_symlinks:
            for attr in 'st_atime', 'st_mtime':
                # The modification times may be truncated in the new file.
                self.assertLessEqual(getattr(src_link_stat, attr),
                                     getattr(dst_stat, attr) + 1)
        if hasattr(os, 'lchmod'):
            self.assertEqual(src_link_stat.st_mode, dst_stat.st_mode)
            self.assertNotEqual(src_stat.st_mode, dst_stat.st_mode)
        if hasattr(os, 'lchflags') and hasattr(src_link_stat, 'st_flags'):
            self.assertEqual(src_link_stat.st_flags, dst_stat.st_flags)
    @support.skip_unless_xattr
    def test_copy2_xattr(self):
        """copy2() carries extended attributes over to the destination."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'foo')
        dst = os.path.join(tmp_dir, 'bar')
        write_file(src, 'foo')
        os.setxattr(src, 'user.foo', b'42')
        shutil.copy2(src, dst)
        self.assertEqual(
            os.getxattr(src, 'user.foo'),
            os.getxattr(dst, 'user.foo'))
        os.remove(dst)
    @support.skip_unless_symlink
    def test_copyfile_symlinks(self):
        """copyfile() clones the link itself when follow_symlinks=False."""
        tmp_dir = self.mkdtemp()
        src = os.path.join(tmp_dir, 'src')
        dst = os.path.join(tmp_dir, 'dst')
        dst_link = os.path.join(tmp_dir, 'dst_link')
        link = os.path.join(tmp_dir, 'link')
        write_file(src, 'foo')
        os.symlink(src, link)
        # don't follow
        shutil.copyfile(link, dst_link, follow_symlinks=False)
        self.assertTrue(os.path.islink(dst_link))
        self.assertEqual(os.readlink(link), os.readlink(dst_link))
        # follow
        shutil.copyfile(link, dst)
        self.assertFalse(os.path.islink(dst))
    def test_rmtree_uses_safe_fd_version_if_available(self):
        """rmtree() dispatches to _rmtree_safe_fd when the OS supports it."""
        # Mirror shutil's own feature test for fd-based directory traversal.
        _use_fd_functions = ({os.open, os.stat, os.unlink, os.rmdir} <=
                             os.supports_dir_fd and
                             os.listdir in os.supports_fd and
                             os.stat in os.supports_follow_symlinks)
        if _use_fd_functions:
            self.assertTrue(shutil._use_fd_functions)
            self.assertTrue(shutil.rmtree.avoids_symlink_attacks)
            tmp_dir = self.mkdtemp()
            d = os.path.join(tmp_dir, 'a')
            os.mkdir(d)
            try:
                real_rmtree = shutil._rmtree_safe_fd
                class Called(Exception): pass
                def _raiser(*args, **kwargs):
                    raise Called
                # If the safe-fd helper is invoked, our stand-in raises.
                shutil._rmtree_safe_fd = _raiser
                self.assertRaises(Called, shutil.rmtree, d)
            finally:
                shutil._rmtree_safe_fd = real_rmtree
        else:
            self.assertFalse(shutil._use_fd_functions)
            self.assertFalse(shutil.rmtree.avoids_symlink_attacks)
    def test_rmtree_dont_delete_file(self):
        """rmtree() on a plain file raises and leaves the file in place."""
        # When called on a file instead of a directory, don't delete it.
        handle, path = tempfile.mkstemp()
        os.close(handle)
        self.assertRaises(NotADirectoryError, shutil.rmtree, path)
        os.remove(path)
    def test_copytree_simple(self):
        """copytree() reproduces files and subdirectories with contents."""
        src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
        self.addCleanup(shutil.rmtree, src_dir)
        self.addCleanup(shutil.rmtree, os.path.dirname(dst_dir))
        write_file((src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        write_file((src_dir, 'test_dir', 'test.txt'), '456')
        shutil.copytree(src_dir, dst_dir)
        self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
        self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
        self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
                                                    'test.txt')))
        actual = read_file((dst_dir, 'test.txt'))
        self.assertEqual(actual, '123')
        actual = read_file((dst_dir, 'test_dir', 'test.txt'))
        self.assertEqual(actual, '456')
    @support.skip_unless_symlink
    def test_copytree_symlinks(self):
        """copytree(symlinks=True) recreates links with their metadata."""
        tmp_dir = self.mkdtemp()
        src_dir = os.path.join(tmp_dir, 'src')
        dst_dir = os.path.join(tmp_dir, 'dst')
        sub_dir = os.path.join(src_dir, 'sub')
        os.mkdir(src_dir)
        os.mkdir(sub_dir)
        write_file((src_dir, 'file.txt'), 'foo')
        src_link = os.path.join(sub_dir, 'link')
        dst_link = os.path.join(dst_dir, 'sub/link')
        os.symlink(os.path.join(src_dir, 'file.txt'),
                   src_link)
        if hasattr(os, 'lchmod'):
            os.lchmod(src_link, stat.S_IRWXU | stat.S_IRWXO)
        if hasattr(os, 'lchflags') and hasattr(stat, 'UF_NODUMP'):
            os.lchflags(src_link, stat.UF_NODUMP)
        src_stat = os.lstat(src_link)
        shutil.copytree(src_dir, dst_dir, symlinks=True)
        self.assertTrue(os.path.islink(os.path.join(dst_dir, 'sub', 'link')))
        self.assertEqual(os.readlink(os.path.join(dst_dir, 'sub', 'link')),
                         os.path.join(src_dir, 'file.txt'))
        dst_stat = os.lstat(dst_link)
        if hasattr(os, 'lchmod'):
            self.assertEqual(dst_stat.st_mode, src_stat.st_mode)
        if hasattr(os, 'lchflags'):
            self.assertEqual(dst_stat.st_flags, src_stat.st_flags)
def test_copytree_with_exclude(self):
# creating data
join = os.path.join
exists = os.path.exists
src_dir = tempfile.mkdtemp()
try:
dst_dir = join(tempfile.mkdtemp(), 'destination')
write_file((src_dir, 'test.txt'), '123')
write_file((src_dir, 'test.tmp'), '123')
os.mkdir(join(src_dir, 'test_dir'))
write_file((src_dir, 'test_dir', 'test.txt'), '456')
os.mkdir(join(src_dir, 'test_dir2'))
write_file((src_dir, 'test_dir2', 'test.txt'), '456')
os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
write_file((src_dir, 'test_dir2', 'subdir', 'test.txt'), '456')
write_file((src_dir, 'test_dir2', 'subdir2', 'test.py'), '456')
# testing glob-like patterns
try:
patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
shutil.copytree(src_dir, dst_dir, ignore=patterns)
# checking the result: some elements should not be copied
self.assertTrue(exists(join(dst_dir, 'test.txt')))
self.assertFalse(exists(join(dst_dir, 'test.tmp')))
self.assertFalse(exists(join(dst_dir, 'test_dir2')))
finally:
shutil.rmtree(dst_dir)
try:
patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
shutil.copytree(src_dir, dst_dir, ignore=patterns)
# checking the result: some elements should not be copied
self.assertFalse(exists(join(dst_dir, 'test.tmp')))
self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2')))
self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir')))
finally:
shutil.rmtree(dst_dir)
# testing callable-style
try:
def _filter(src, names):
res = []
for name in names:
path = os.path.join(src, name)
if (os.path.isdir(path) and
path.split()[-1] == 'subdir'):
res.append(name)
elif os.path.splitext(path)[-1] in ('.py'):
res.append(name)
return res
shutil.copytree(src_dir, dst_dir, ignore=_filter)
# checking the result: some elements should not be copied
self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir2',
'test.py')))
self.assertFalse(exists(join(dst_dir, 'test_dir2', 'subdir')))
finally:
shutil.rmtree(dst_dir)
finally:
shutil.rmtree(src_dir)
shutil.rmtree(os.path.dirname(dst_dir))
    def test_copytree_retains_permissions(self):
        """copytree() preserves mode bits of dirs and files, lax or strict."""
        tmp_dir = tempfile.mkdtemp()
        src_dir = os.path.join(tmp_dir, 'source')
        os.mkdir(src_dir)
        dst_dir = os.path.join(tmp_dir, 'destination')
        self.addCleanup(shutil.rmtree, tmp_dir)
        os.chmod(src_dir, 0o777)
        write_file((src_dir, 'permissive.txt'), '123')
        os.chmod(os.path.join(src_dir, 'permissive.txt'), 0o777)
        write_file((src_dir, 'restrictive.txt'), '456')
        os.chmod(os.path.join(src_dir, 'restrictive.txt'), 0o600)
        restrictive_subdir = tempfile.mkdtemp(dir=src_dir)
        os.chmod(restrictive_subdir, 0o600)
        shutil.copytree(src_dir, dst_dir)
        self.assertEqual(os.stat(src_dir).st_mode, os.stat(dst_dir).st_mode)
        self.assertEqual(os.stat(os.path.join(src_dir, 'permissive.txt')).st_mode,
                         os.stat(os.path.join(dst_dir, 'permissive.txt')).st_mode)
        self.assertEqual(os.stat(os.path.join(src_dir, 'restrictive.txt')).st_mode,
                         os.stat(os.path.join(dst_dir, 'restrictive.txt')).st_mode)
        restrictive_subdir_dst = os.path.join(dst_dir,
                                              os.path.split(restrictive_subdir)[1])
        self.assertEqual(os.stat(restrictive_subdir).st_mode,
                         os.stat(restrictive_subdir_dst).st_mode)
    @unittest.skipUnless(hasattr(os, 'link'), 'requires os.link')
    def test_dont_copy_file_onto_link_to_itself(self):
        """copyfile() onto a hard link of the source raises SameFileError."""
        # Temporarily disable test on Windows.
        if os.name == 'nt':
            return
        # bug 851123.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            with open(src, 'w') as f:
                f.write('cheddar')
            os.link(src, dst)
            self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst)
            # Contents must be untouched after the refused copy.
            with open(src, 'r') as f:
                self.assertEqual(f.read(), 'cheddar')
            os.remove(dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)
    @support.skip_unless_symlink
    def test_dont_copy_file_onto_symlink_to_itself(self):
        """copyfile() onto a symlink back to the source raises SameFileError."""
        # bug 851123.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            with open(src, 'w') as f:
                f.write('cheddar')
            # Using `src` here would mean we end up with a symlink pointing
            # to TESTFN/TESTFN/cheese, while it should point at
            # TESTFN/cheese.
            os.symlink('cheese', dst)
            self.assertRaises(shutil.SameFileError, shutil.copyfile, src, dst)
            with open(src, 'r') as f:
                self.assertEqual(f.read(), 'cheddar')
            os.remove(dst)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)
    @support.skip_unless_symlink
    def test_rmtree_on_symlink(self):
        """rmtree() on a symlink raises; ignore_errors suppresses it."""
        # bug 1669.
        os.mkdir(TESTFN)
        try:
            src = os.path.join(TESTFN, 'cheese')
            dst = os.path.join(TESTFN, 'shop')
            os.mkdir(src)
            os.symlink(src, dst)
            self.assertRaises(OSError, shutil.rmtree, dst)
            shutil.rmtree(dst, ignore_errors=True)
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)
    # Issue #3002: copyfile and copytree block indefinitely on named pipes
    @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()')
    def test_copyfile_named_pipe(self):
        """copyfile() raises SpecialFileError for a FIFO as src or dst."""
        os.mkfifo(TESTFN)
        try:
            self.assertRaises(shutil.SpecialFileError,
                              shutil.copyfile, TESTFN, TESTFN2)
            self.assertRaises(shutil.SpecialFileError,
                              shutil.copyfile, __file__, TESTFN)
        finally:
            os.remove(TESTFN)
    @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()')
    @support.skip_unless_symlink
    def test_copytree_named_pipe(self):
        """copytree() reports a FIFO inside the tree via shutil.Error."""
        os.mkdir(TESTFN)
        try:
            subdir = os.path.join(TESTFN, "subdir")
            os.mkdir(subdir)
            pipe = os.path.join(subdir, "mypipe")
            os.mkfifo(pipe)
            try:
                shutil.copytree(TESTFN, TESTFN2)
            except shutil.Error as e:
                # The Error aggregates (src, dst, message) triples.
                errors = e.args[0]
                self.assertEqual(len(errors), 1)
                src, dst, error_msg = errors[0]
                self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
            else:
                self.fail("shutil.Error should have been raised")
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)
            shutil.rmtree(TESTFN2, ignore_errors=True)
    def test_copytree_special_func(self):
        """copytree() uses a caller-supplied copy_function for each file."""
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        write_file((src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        write_file((src_dir, 'test_dir', 'test.txt'), '456')
        copied = []
        def _copy(src, dst):
            copied.append((src, dst))
        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
        # One call per regular file (two files in the tree).
        self.assertEqual(len(copied), 2)
    @support.skip_unless_symlink
    def test_copytree_dangling_symlinks(self):
        """copytree() behaviour for broken links: error, ignore, or copy."""
        # a dangling symlink raises an error at the end
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        write_file((src_dir, 'test_dir', 'test.txt'), '456')
        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)
        # a dangling symlink is ignored with the proper flag
        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
        self.assertNotIn('test.txt', os.listdir(dst_dir))
        # a dangling symlink is copied if symlinks=True
        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
        shutil.copytree(src_dir, dst_dir, symlinks=True)
        self.assertIn('test.txt', os.listdir(dst_dir))
    def _copy_file(self, method):
        """Copy a fresh temp file into a second temp dir using *method*.

        Returns a (source_path, destination_path) tuple.
        """
        fname = 'test.txt'
        tmpdir = self.mkdtemp()
        write_file((tmpdir, fname), 'xxx')
        file1 = os.path.join(tmpdir, fname)
        tmpdir2 = self.mkdtemp()
        method(file1, tmpdir2)
        file2 = os.path.join(tmpdir2, fname)
        return (file1, file2)
    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    def test_copy(self):
        """copy() duplicates file contents and mode bits."""
        # Ensure that the copied file exists and has the same mode bits.
        file1, file2 = self._copy_file(shutil.copy)
        self.assertTrue(os.path.exists(file2))
        self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode)
    @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod')
    @unittest.skipUnless(hasattr(os, 'utime'), 'requires os.utime')
    def test_copy2(self):
        """copy2() duplicates contents plus mode, timestamps and flags."""
        # Ensure that the copied file exists and has the same mode and
        # modification time bits.
        file1, file2 = self._copy_file(shutil.copy2)
        self.assertTrue(os.path.exists(file2))
        file1_stat = os.stat(file1)
        file2_stat = os.stat(file2)
        self.assertEqual(file1_stat.st_mode, file2_stat.st_mode)
        for attr in 'st_atime', 'st_mtime':
            # The modification times may be truncated in the new file.
            self.assertLessEqual(getattr(file1_stat, attr),
                                 getattr(file2_stat, attr) + 1)
        if hasattr(os, 'chflags') and hasattr(file1_stat, 'st_flags'):
            self.assertEqual(getattr(file1_stat, 'st_flags'),
                             getattr(file2_stat, 'st_flags'))
@requires_zlib
def test_make_tarball(self):
# creating something to tar
tmpdir = self.mkdtemp()
write_file((tmpdir, 'file1'), 'xxx')
write_file((tmpdir, 'file2'), 'xxx')
os.mkdir(os.path.join(tmpdir, 'sub'))
write_file((tmpdir, 'sub', 'file3'), 'xxx')
tmpdir2 = self.mkdtemp()
# force shutil to create the directory
os.rmdir(tmpdir2)
unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
"source and target should be on same drive")
base_name = os.path.join(tmpdir2, 'archive')
# working with relative paths to avoid tar warnings
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
_make_tarball(splitdrive(base_name)[1], '.')
finally:
os.chdir(old_dir)
# check if the compressed tarball was created
tarball = base_name + '.tar.gz'
self.assertTrue(os.path.exists(tarball))
# trying an uncompressed one
base_name = os.path.join(tmpdir2, 'archive')
old_dir = os.getcwd()
os.chdir(tmpdir)
try:
_make_tarball(splitdrive(base_name)[1], '.', compress=None)
finally:
os.chdir(old_dir)
tarball = base_name + '.tar'
self.assertTrue(os.path.exists(tarball))
def _tarinfo(self, path):
tar = tarfile.open(path)
try:
names = tar.getnames()
names.sort()
return tuple(names)
finally:
tar.close()
    def _create_files(self):
        """Build a small 'dist' tree to archive.

        Returns (tmpdir, tmpdir2, base_name): the tree's parent dir, a
        second temp dir, and the archive base path inside it.
        """
        # creating something to tar
        tmpdir = self.mkdtemp()
        dist = os.path.join(tmpdir, 'dist')
        os.mkdir(dist)
        write_file((dist, 'file1'), 'xxx')
        write_file((dist, 'file2'), 'xxx')
        os.mkdir(os.path.join(dist, 'sub'))
        write_file((dist, 'sub', 'file3'), 'xxx')
        os.mkdir(os.path.join(dist, 'sub2'))
        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        return tmpdir, tmpdir2, base_name
    @requires_zlib
    @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
                         'Need the tar command to run')
    def test_tarfile_vs_tar(self):
        """_make_tarball() output matches the external tar+gzip commands."""
        tmpdir, tmpdir2, base_name = self._create_files()
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist')
        finally:
            os.chdir(old_dir)
        # check if the compressed tarball was created
        tarball = base_name + '.tar.gz'
        self.assertTrue(os.path.exists(tarball))
        # now create another tarball using `tar`
        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            with captured_stdout() as s:
                spawn(tar_cmd)
                spawn(gzip_cmd)
        finally:
            os.chdir(old_dir)
        self.assertTrue(os.path.exists(tarball2))
        # let's compare both tarballs
        self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2))
        # trying an uncompressed one
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None)
        finally:
            os.chdir(old_dir)
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))
        # now for a dry_run
        base_name = os.path.join(tmpdir2, 'archive')
        old_dir = os.getcwd()
        os.chdir(tmpdir)
        try:
            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
        finally:
            os.chdir(old_dir)
        # dry_run must not overwrite, but the file from the previous
        # step still exists.
        tarball = base_name + '.tar'
        self.assertTrue(os.path.exists(tarball))
@requires_zlib
@unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
def test_make_zipfile(self):
    """_make_zipfile produces a .zip archive next to the given base name."""
    # content to archive
    src = self.mkdtemp()
    for name in ('file1', 'file2'):
        write_file((src, name), 'xxx')
    dest = self.mkdtemp()
    # force shutil to create the directory itself
    os.rmdir(dest)
    base_name = os.path.join(dest, 'archive')
    _make_zipfile(base_name, src)
    # the zip archive must have been created
    self.assertTrue(os.path.exists(base_name + '.zip'))
def test_make_archive(self):
    """make_archive rejects unknown archive formats with ValueError."""
    base_name = os.path.join(self.mkdtemp(), 'archive')
    self.assertRaises(ValueError, make_archive, base_name, 'xxx')
@requires_zlib
def test_make_archive_owner_group(self):
    """make_archive accepts owner/group for zip and tar in any combination."""
    # this works even if there's no gid/uid support on the platform
    if UID_GID_SUPPORT:
        group = grp.getgrgid(0)[0]
        owner = pwd.getpwuid(0)[0]
    else:
        group = owner = 'root'
    base_dir, root_dir, base_name = self._create_files()
    base_name = os.path.join(self.mkdtemp(), 'archive')
    # same sequence as before, expressed as a table of (format, kwargs):
    # zip honours nothing, tar honours owner/group, bogus names are ignored.
    for fmt, kwargs in (('zip', {'owner': owner, 'group': group}),
                        ('zip', {}),
                        ('tar', {'owner': owner, 'group': group}),
                        ('tar', {'owner': 'kjhkjhkjg', 'group': 'oihohoh'})):
        res = make_archive(base_name, fmt, root_dir, base_dir, **kwargs)
        self.assertTrue(os.path.exists(res))
@requires_zlib
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
def test_tarfile_root_owner(self):
    """_make_tarball honours owner/group: every member must belong to root."""
    tmpdir, tmpdir2, base_name = self._create_files()
    cwd = os.getcwd()
    os.chdir(tmpdir)
    root_group = grp.getgrgid(0)[0]
    root_owner = pwd.getpwuid(0)[0]
    try:
        archive_name = _make_tarball(base_name, 'dist', compress=None,
                                     owner=root_owner, group=root_group)
    finally:
        os.chdir(cwd)
    # the uncompressed tarball was created
    self.assertTrue(os.path.exists(archive_name))
    # every member must be owned by uid/gid 0
    archive = tarfile.open(archive_name)
    try:
        for member in archive.getmembers():
            self.assertEqual(member.uid, 0)
            self.assertEqual(member.gid, 0)
    finally:
        archive.close()
def test_make_archive_cwd(self):
    """make_archive restores the cwd even when the archiver callback raises."""
    current_dir = os.getcwd()
    def _breaks(*args, **kw):
        raise RuntimeError()
    register_archive_format('xxx', _breaks, [], 'xxx file')
    try:
        try:
            make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
        except Exception:
            # the archiver deliberately blew up; we only care about the cwd
            pass
        self.assertEqual(os.getcwd(), current_dir)
    finally:
        unregister_archive_format('xxx')
def test_register_archive_format(self):
    """register_archive_format validates arguments and (un)registers formats."""
    # the function must be callable, and extra args must be 2-element tuples
    self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
    self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
                      1)
    self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
                      [(1, 2), (1, 2, 3)])
    register_archive_format('xxx', lambda: x, [(1, 2)], 'xxx file')
    self.assertIn('xxx', [name for name, params in get_archive_formats()])
    unregister_archive_format('xxx')
    self.assertNotIn('xxx', [name for name, params in get_archive_formats()])
def _compare_dirs(self, dir1, dir2):
    """Return names of files under *dir1* with no same-named file in *dir2*."""
    missing = []
    for root, _dirs, files in os.walk(dir1):
        for name in files:
            # compare by basename only (archives are unpacked flat here)
            counterpart = os.path.join(dir2, os.path.basename(os.path.join(root, name)))
            if not os.path.exists(counterpart):
                missing.append(name)
    return missing
@requires_zlib
def test_unpack_archive(self):
    """unpack_archive restores the archived tree, with or without explicit format."""
    formats = ['tar', 'gztar', 'zip']
    if BZ2_SUPPORTED:
        formats.append('bztar')
    for fmt in formats:
        tmpdir = self.mkdtemp()
        base_dir, root_dir, base_name = self._create_files()
        filename = make_archive(base_name, fmt, root_dir, base_dir)
        # format guessed from the file extension
        tmpdir2 = self.mkdtemp()
        unpack_archive(filename, tmpdir2)
        self.assertEqual(self._compare_dirs(tmpdir, tmpdir2), [])
        # format passed explicitly
        tmpdir3 = self.mkdtemp()
        unpack_archive(filename, tmpdir3, format=fmt)
        self.assertEqual(self._compare_dirs(tmpdir, tmpdir3), [])
    # unreadable file / unknown format both raise
    self.assertRaises(shutil.ReadError, unpack_archive, TESTFN)
    self.assertRaises(ValueError, unpack_archive, TESTFN, format='xxx')
def test_unpack_registery(self):
    """register/unregister_unpack_format manage custom unpackers correctly."""
    formats = get_unpack_formats()
    def _boo(filename, extract_dir, extra):
        # the extra arg and the call arguments are forwarded untouched
        self.assertEqual(extra, 1)
        self.assertEqual(filename, 'stuff.boo')
        self.assertEqual(extract_dir, 'xx')
    register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
    unpack_archive('stuff.boo', 'xx')
    # registering a second unpacker for .boo must fail
    self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
                      ['.boo'], _boo)
    # after unregistering, the extension is free again
    unregister_unpack_format('Boo')
    register_unpack_format('Boo2', ['.boo'], _boo)
    self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
    self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())
    # leave a clean state behind
    unregister_unpack_format('Boo2')
    self.assertEqual(get_unpack_formats(), formats)
@unittest.skipUnless(hasattr(shutil, 'disk_usage'),
                     "disk_usage not available on this platform")
def test_disk_usage(self):
    """disk_usage returns a plausible (total, used, free) triple."""
    total, used, free = shutil.disk_usage(os.getcwd())
    self.assertGreater(total, 0)
    self.assertGreater(used, 0)
    self.assertGreaterEqual(free, 0)
    self.assertGreaterEqual(total, used)
    self.assertGreater(total, free)
@unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
@unittest.skipUnless(hasattr(os, 'chown'), 'requires os.chown')
def test_chown(self):
    """shutil.chown accepts names or ids and validates its arguments."""
    # cleaned-up automatically by TestShutil.tearDown method
    dirname = self.mkdtemp()
    filename = tempfile.mktemp(dir=dirname)
    write_file(filename, 'testing chown function')
    with self.assertRaises(ValueError):
        shutil.chown(filename)
    with self.assertRaises(LookupError):
        shutil.chown(filename, user='non-exising username')
    with self.assertRaises(LookupError):
        shutil.chown(filename, group='non-exising groupname')
    with self.assertRaises(TypeError):
        shutil.chown(filename, b'spam')
    with self.assertRaises(TypeError):
        shutil.chown(filename, 3.14)
    uid = os.getuid()
    gid = os.getgid()
    def check_chown(path, uid=None, gid=None):
        # BUG FIX: the original stat'ed the closed-over *filename* instead of
        # the *path* parameter, so the directory checks below never actually
        # verified the ownership of *dirname*.
        s = os.stat(path)
        if uid is not None:
            self.assertEqual(uid, s.st_uid)
        if gid is not None:
            self.assertEqual(gid, s.st_gid)
    shutil.chown(filename, uid, gid)
    check_chown(filename, uid, gid)
    shutil.chown(filename, uid)
    check_chown(filename, uid)
    shutil.chown(filename, user=uid)
    check_chown(filename, uid)
    shutil.chown(filename, group=gid)
    check_chown(filename, gid=gid)
    shutil.chown(dirname, uid, gid)
    check_chown(dirname, uid, gid)
    shutil.chown(dirname, uid)
    check_chown(dirname, uid)
    shutil.chown(dirname, user=uid)
    check_chown(dirname, uid)
    shutil.chown(dirname, group=gid)
    check_chown(dirname, gid=gid)
    # the same operations with symbolic user/group names
    user = pwd.getpwuid(uid)[0]
    group = grp.getgrgid(gid)[0]
    shutil.chown(filename, user, group)
    check_chown(filename, uid, gid)
    shutil.chown(dirname, user, group)
    check_chown(dirname, uid, gid)
def test_copy_return_value(self):
    """copy and copy2 both return their destination path."""
    for fn in (shutil.copy, shutil.copy2):
        src_dir = self.mkdtemp()
        dst_dir = self.mkdtemp()
        src = os.path.join(src_dir, 'foo')
        write_file(src, 'foo')
        # copying into a directory resolves to dir/<basename>
        self.assertEqual(fn(src, dst_dir), os.path.join(dst_dir, 'foo'))
        # copying onto an explicit path returns that path
        self.assertEqual(fn(src, os.path.join(dst_dir, 'bar')),
                         os.path.join(dst_dir, 'bar'))
def test_copyfile_return_value(self):
    """copyfile returns the destination path and copies the content."""
    src_dir = self.mkdtemp()
    dst_dir = self.mkdtemp()
    src_file = os.path.join(src_dir, 'foo')
    dst_file = os.path.join(dst_dir, 'bar')
    write_file(src_file, 'foo')
    rv = shutil.copyfile(src_file, dst_file)
    self.assertTrue(os.path.exists(rv))
    self.assertEqual(read_file(src_file), read_file(dst_file))
def test_copyfile_same_file(self):
    """copyfile raises SameFileError when source and destination coincide."""
    src_file = os.path.join(self.mkdtemp(), 'foo')
    write_file(src_file, 'foo')
    self.assertRaises(SameFileError, shutil.copyfile, src_file, src_file)
    # SameFileError subclasses Error, so catching Error stays backward compatible
    self.assertRaises(Error, shutil.copyfile, src_file, src_file)
def test_copytree_return_value(self):
    """copytree returns its (newly created) destination path."""
    src_dir = self.mkdtemp()
    dst_dir = src_dir + "dest"
    self.addCleanup(shutil.rmtree, dst_dir, True)
    write_file(os.path.join(src_dir, 'foo'), 'foo')
    rv = shutil.copytree(src_dir, dst_dir)
    self.assertEqual(['foo'], os.listdir(rv))
class TestWhich(unittest.TestCase):
    """Tests for shutil.which."""

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp(prefix="Tmp")
        self.addCleanup(shutil.rmtree, self.temp_dir, True)
        # Give the temp_file an ".exe" suffix for all platforms.
        # It's needed on Windows and not harmful elsewhere.
        self.temp_file = tempfile.NamedTemporaryFile(dir=self.temp_dir,
                                                     prefix="Tmp",
                                                     suffix=".Exe")
        os.chmod(self.temp_file.name, stat.S_IXUSR)
        self.addCleanup(self.temp_file.close)
        self.dir, self.file = os.path.split(self.temp_file.name)

    def test_basic(self):
        # An executable present in the search path is found.
        self.assertEqual(shutil.which(self.file, path=self.dir),
                         self.temp_file.name)

    def test_absolute_cmd(self):
        # A fully qualified existing executable is returned as-is.
        self.assertEqual(shutil.which(self.temp_file.name, path=self.temp_dir),
                         self.temp_file.name)

    def test_relative_cmd(self):
        # A relative path with a directory part resolves against the cwd...
        base_dir, tail_dir = os.path.split(self.dir)
        relpath = os.path.join(tail_dir, self.file)
        with support.change_cwd(path=base_dir):
            self.assertEqual(shutil.which(relpath, path=self.temp_dir),
                             relpath)
        # ...but is never searched in PATH directories (issue #16957).
        with support.change_cwd(path=self.dir):
            self.assertIsNone(shutil.which(relpath, path=base_dir))

    def test_cwd(self):
        # Issue #16957
        base_dir = os.path.dirname(self.dir)
        with support.change_cwd(path=self.dir):
            rv = shutil.which(self.file, path=base_dir)
            if sys.platform == "win32":
                # Windows: current directory is implicitly on PATH
                self.assertEqual(rv, os.path.join(os.curdir, self.file))
            else:
                # Other platforms: must not match in the current directory
                self.assertIsNone(rv)

    @unittest.skipIf(hasattr(os, 'geteuid') and os.geteuid() == 0,
                     'non-root user required')
    def test_non_matching_mode(self):
        # A read-only file must not satisfy a request for a writable one.
        os.chmod(self.temp_file.name, stat.S_IREAD)
        if os.access(self.temp_file.name, os.W_OK):
            self.skipTest("can't set the file read-only")
        self.assertIsNone(shutil.which(self.file, path=self.dir, mode=os.W_OK))

    def test_relative_path(self):
        base_dir, tail_dir = os.path.split(self.dir)
        with support.change_cwd(path=base_dir):
            self.assertEqual(shutil.which(self.file, path=tail_dir),
                             os.path.join(tail_dir, self.file))

    def test_nonexistent_file(self):
        # None is returned when nothing on the path matches.
        self.assertIsNone(shutil.which("foo.exe", path=self.dir))

    @unittest.skipUnless(sys.platform == "win32",
                         "pathext check is Windows-only")
    def test_pathext_checking(self):
        # Asking without the ".exe" extension still finds the file via PATHEXT.
        rv = shutil.which(self.file[:-4], path=self.dir)
        self.assertEqual(rv, self.temp_file.name[:-4] + ".EXE")

    def test_environ_path(self):
        with support.EnvironmentVarGuard() as env:
            env['PATH'] = self.dir
            self.assertEqual(shutil.which(self.file), self.temp_file.name)

    def test_empty_path(self):
        base_dir = os.path.dirname(self.dir)
        with support.change_cwd(path=self.dir), \
             support.EnvironmentVarGuard() as env:
            env['PATH'] = self.dir
            # an explicit empty path must not fall back to PATH or cwd
            self.assertIsNone(shutil.which(self.file, path=''))

    def test_empty_path_no_PATH(self):
        with support.EnvironmentVarGuard() as env:
            env.pop('PATH', None)
            self.assertIsNone(shutil.which(self.file))
class TestMove(unittest.TestCase):
    """Tests for shutil.move on files, directories and symlinks."""

    def setUp(self):
        filename = "foo"
        self.src_dir = tempfile.mkdtemp()
        self.dst_dir = tempfile.mkdtemp()
        self.src_file = os.path.join(self.src_dir, filename)
        self.dst_file = os.path.join(self.dst_dir, filename)
        with open(self.src_file, "wb") as f:
            f.write(b"spam")

    def tearDown(self):
        for d in (self.src_dir, self.dst_dir):
            try:
                if d:
                    shutil.rmtree(d)
            except:
                pass

    def _check_move_file(self, src, dst, real_dst):
        # The content must survive the move and the source must disappear.
        with open(src, "rb") as f:
            expected = f.read()
        shutil.move(src, dst)
        with open(real_dst, "rb") as f:
            self.assertEqual(expected, f.read())
        self.assertFalse(os.path.exists(src))

    def _check_move_dir(self, src, dst, real_dst):
        expected = sorted(os.listdir(src))
        shutil.move(src, dst)
        self.assertEqual(expected, sorted(os.listdir(real_dst)))
        self.assertFalse(os.path.exists(src))

    def test_move_file(self):
        # Move a file to another location on the same filesystem.
        self._check_move_file(self.src_file, self.dst_file, self.dst_file)

    def test_move_file_to_dir(self):
        # Move a file inside an existing dir on the same filesystem.
        self._check_move_file(self.src_file, self.dst_dir, self.dst_file)

    @mock_rename
    def test_move_file_other_fs(self):
        # Same checks, with os.rename mocked to simulate a cross-fs move.
        self.test_move_file()

    @mock_rename
    def test_move_file_to_dir_other_fs(self):
        self.test_move_file_to_dir()

    def test_move_dir(self):
        # Move a dir to another location on the same filesystem.
        dst_dir = tempfile.mktemp()
        try:
            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
        finally:
            try:
                shutil.rmtree(dst_dir)
            except:
                pass

    @mock_rename
    def test_move_dir_other_fs(self):
        self.test_move_dir()

    def test_move_dir_to_dir(self):
        # Move a dir inside an existing dir on the same filesystem.
        self._check_move_dir(self.src_dir, self.dst_dir,
            os.path.join(self.dst_dir, os.path.basename(self.src_dir)))

    @mock_rename
    def test_move_dir_to_dir_other_fs(self):
        self.test_move_dir_to_dir()

    def test_existing_file_inside_dest_dir(self):
        # A same-named file already inside the destination dir -> Error.
        with open(self.dst_file, "wb"):
            pass
        self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)

    def test_dont_move_dir_in_itself(self):
        # Moving a dir inside itself raises an Error.
        dst = os.path.join(self.src_dir, "bar")
        self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)

    def test_destinsrc_false_negative(self):
        os.mkdir(TESTFN)
        try:
            for src, dst in [('srcdir', 'srcdir/dest')]:
                src = os.path.join(TESTFN, src)
                dst = os.path.join(TESTFN, dst)
                self.assertTrue(shutil._destinsrc(src, dst),
                    msg='_destinsrc() wrongly concluded that '
                    'dst (%s) is not in src (%s)' % (dst, src))
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    def test_destinsrc_false_positive(self):
        os.mkdir(TESTFN)
        try:
            for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]:
                src = os.path.join(TESTFN, src)
                dst = os.path.join(TESTFN, dst)
                self.assertFalse(shutil._destinsrc(src, dst),
                    msg='_destinsrc() wrongly concluded that '
                    'dst (%s) is in src (%s)' % (dst, src))
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)

    @support.skip_unless_symlink
    @mock_rename
    def test_move_file_symlink(self):
        # Moving a symlink moves the link itself, not its target.
        dst = os.path.join(self.src_dir, 'bar')
        os.symlink(self.src_file, dst)
        shutil.move(dst, self.dst_file)
        self.assertTrue(os.path.islink(self.dst_file))
        self.assertTrue(os.path.samefile(self.src_file, self.dst_file))

    @support.skip_unless_symlink
    @mock_rename
    def test_move_file_symlink_to_dir(self):
        filename = "bar"
        dst = os.path.join(self.src_dir, filename)
        os.symlink(self.src_file, dst)
        shutil.move(dst, self.dst_dir)
        final_link = os.path.join(self.dst_dir, filename)
        self.assertTrue(os.path.islink(final_link))
        self.assertTrue(os.path.samefile(self.src_file, final_link))

    @support.skip_unless_symlink
    @mock_rename
    def test_move_dangling_symlink(self):
        # A symlink whose target does not exist can still be moved.
        src = os.path.join(self.src_dir, 'baz')
        dst = os.path.join(self.src_dir, 'bar')
        os.symlink(src, dst)
        dst_link = os.path.join(self.dst_dir, 'quux')
        shutil.move(dst, dst_link)
        self.assertTrue(os.path.islink(dst_link))
        self.assertEqual(os.path.realpath(src), os.path.realpath(dst_link))

    @support.skip_unless_symlink
    @mock_rename
    def test_move_dir_symlink(self):
        src = os.path.join(self.src_dir, 'baz')
        dst = os.path.join(self.src_dir, 'bar')
        os.mkdir(src)
        os.symlink(src, dst)
        dst_link = os.path.join(self.dst_dir, 'quux')
        shutil.move(dst, dst_link)
        self.assertTrue(os.path.islink(dst_link))
        self.assertTrue(os.path.samefile(src, dst_link))

    def test_move_return_value(self):
        # Moving into a directory returns dir/<basename>.
        rv = shutil.move(self.src_file, self.dst_dir)
        self.assertEqual(rv,
            os.path.join(self.dst_dir, os.path.basename(self.src_file)))

    def test_move_as_rename_return_value(self):
        # Moving onto an explicit path returns that path.
        rv = shutil.move(self.src_file, os.path.join(self.dst_dir, 'bar'))
        self.assertEqual(rv, os.path.join(self.dst_dir, 'bar'))
class TestCopyFile(unittest.TestCase):
    """White-box tests for copyfile's handling of the file context managers."""

    # set when shutil.open has been monkey-patched and must be restored
    _delete = False

    class Faux(object):
        """Fake file object recording context-manager interactions."""
        _entered = False
        _exited_with = None
        _raised = False

        def __init__(self, raise_in_exit=False, suppress_at_exit=True):
            self._raise_in_exit = raise_in_exit
            self._suppress_at_exit = suppress_at_exit

        def read(self, *args):
            return ''

        def __enter__(self):
            self._entered = True

        def __exit__(self, exc_type, exc_val, exc_tb):
            self._exited_with = exc_type, exc_val, exc_tb
            if self._raise_in_exit:
                self._raised = True
                raise OSError("Cannot close")
            return self._suppress_at_exit

    def tearDown(self):
        # Undo the shutil.open monkey-patch installed by _set_shutil_open.
        if self._delete:
            del shutil.open

    def _set_shutil_open(self, func):
        shutil.open = func
        self._delete = True

    def test_w_source_open_fails(self):
        def _open(filename, mode='r'):
            if filename == 'srcfile':
                raise OSError('Cannot open "srcfile"')
            assert 0  # shouldn't reach here.
        self._set_shutil_open(_open)
        self.assertRaises(OSError, shutil.copyfile, 'srcfile', 'destfile')

    def test_w_dest_open_fails(self):
        srcfile = self.Faux()
        def _open(filename, mode='r'):
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                raise OSError('Cannot open "destfile"')
            assert 0  # shouldn't reach here.
        self._set_shutil_open(_open)
        shutil.copyfile('srcfile', 'destfile')
        # The source was entered, and exited with the destination's error.
        self.assertTrue(srcfile._entered)
        self.assertTrue(srcfile._exited_with[0] is OSError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot open "destfile"',))

    def test_w_dest_close_fails(self):
        srcfile = self.Faux()
        destfile = self.Faux(True)
        def _open(filename, mode='r'):
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.
        self._set_shutil_open(_open)
        shutil.copyfile('srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertTrue(destfile._raised)
        # The close error from the destination reaches the source's __exit__.
        self.assertTrue(srcfile._exited_with[0] is OSError)
        self.assertEqual(srcfile._exited_with[1].args,
                         ('Cannot close',))

    def test_w_source_close_fails(self):
        srcfile = self.Faux(True)
        destfile = self.Faux()
        def _open(filename, mode='r'):
            if filename == 'srcfile':
                return srcfile
            if filename == 'destfile':
                return destfile
            assert 0  # shouldn't reach here.
        self._set_shutil_open(_open)
        self.assertRaises(OSError,
                          shutil.copyfile, 'srcfile', 'destfile')
        self.assertTrue(srcfile._entered)
        self.assertTrue(destfile._entered)
        self.assertFalse(destfile._raised)
        self.assertTrue(srcfile._exited_with[0] is None)
        self.assertTrue(srcfile._raised)

    def test_move_dir_caseinsensitive(self):
        # NOTE(review): this exercises shutil.move and probably belongs in
        # TestMove -- moving it would change the test id, so it stays here.
        # Rename a folder to the same name but a different case.
        self.src_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.src_dir, True)
        dst_dir = os.path.join(
            os.path.dirname(self.src_dir),
            os.path.basename(self.src_dir).upper())
        self.assertNotEqual(self.src_dir, dst_dir)
        try:
            shutil.move(self.src_dir, dst_dir)
            self.assertTrue(os.path.isdir(dst_dir))
        finally:
            os.rmdir(dst_dir)
class TermsizeTests(unittest.TestCase):
    """Tests for shutil.get_terminal_size."""

    def test_does_not_crash(self):
        """Check if get_terminal_size() returns a meaningful value.

        There's no easy portable way to actually check the size of the
        terminal, so let's check if it returns something sensible instead.
        """
        size = shutil.get_terminal_size()
        self.assertGreaterEqual(size.columns, 0)
        self.assertGreaterEqual(size.lines, 0)

    def test_os_environ_first(self):
        "Check if environment variables have precedence"
        with support.EnvironmentVarGuard() as env:
            env['COLUMNS'] = '777'
            self.assertEqual(shutil.get_terminal_size().columns, 777)
        with support.EnvironmentVarGuard() as env:
            env['LINES'] = '888'
            self.assertEqual(shutil.get_terminal_size().lines, 888)

    @unittest.skipUnless(os.isatty(sys.__stdout__.fileno()), "not on tty")
    def test_stty_match(self):
        """Check if stty returns the same results ignoring env

        This test will fail if stdin and stdout are connected to
        different terminals with different sizes. Nevertheless, such
        situations should be pretty rare.
        """
        try:
            size = subprocess.check_output(['stty', 'size']).decode().split()
        except (FileNotFoundError, subprocess.CalledProcessError):
            self.skipTest("stty invocation failed")
        # stty prints "rows columns"; get_terminal_size is (columns, lines)
        expected = (int(size[1]), int(size[0]))
        with support.EnvironmentVarGuard() as env:
            del env['LINES']
            del env['COLUMNS']
            self.assertEqual(expected, shutil.get_terminal_size())
if __name__ == '__main__':
    # Run the whole module's test suite when invoked as a script.
    unittest.main()
|
var expect = require('expect.js');
var util = require('util');
var chalk = require('chalk');
var fixtures = require('../fixtures');
var helpers = require('../helpers');
var BaseReporter = require('../../lib/reporters/base');
var Inspector = require('../../lib/inspector');
// A simple TestReporter for testing the BaseReporter
// Minimal concrete reporter: it wires up the summary listener in the
// constructor but emits no per-match output (_getOutput is a no-op stub),
// keeping captured output deterministic for the assertions below.
class TestReporter extends BaseReporter {
constructor(inspector) {
super(inspector);
this._registerSummary();
}
// Overridden to suppress per-match output during tests.
_getOutput() {}
}
describe('BaseReporter', function () {
var inspector, reporter;
beforeEach(function () {
// Capture stdout first; the reporter is constructed while capture is active
// so its summary output can be inspected after restoreOutput().
helpers.captureOutput();
inspector = new Inspector([fixtures.intersection], {
threshold: 15,
});
reporter = new TestReporter(inspector);
});
afterEach(function () {
helpers.restoreOutput();
});
describe('constructor', function () {
it('accepts an inspector as an argument', function () {
expect(reporter._inspector).to.be(inspector);
});
it('registers a listener for the match event', function () {
expect(inspector.listeners('match')).to.have.length(1);
});
});
describe('given a match', function () {
it('increments the number found', function () {
inspector.emit('match', {});
// restore before asserting so the assertion failure output is visible
helpers.restoreOutput();
expect(reporter._found).to.be(1);
});
it('invokes _getOutput', function () {
// stub _getOutput to echo the match, proving it receives the payload
reporter._getOutput = function (match) {
return match;
};
inspector.emit('match', 'invoked');
helpers.restoreOutput();
expect(helpers.getOutput()).to.be('invoked');
});
});
describe('summary', function () {
it('can be printed on inspector end', function () {
inspector.run();
helpers.restoreOutput();
expect(helpers.getOutput()).to.not.be(null);
});
it('prints the correct results if no matches were found', function () {
// threshold 40 is high enough that the fixture yields no matches
inspector = new Inspector([fixtures.intersection], {
threshold: 40,
});
var reporter = new TestReporter(inspector);
inspector.run();
helpers.restoreOutput();
expect(helpers.getOutput()).to.be('\nNo matches found across 1 file\n');
});
it('prints the correct results if matches were found', function () {
inspector.run();
helpers.restoreOutput();
expect(helpers.getOutput()).to.be('\n1 match found across 1 file\n');
});
});
});
|
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.generic.base import TemplateView
from ._common import DALMEContextMixin
from dalme_app.forms import SearchForm
from django.forms import formset_factory
from dalme_app.utils import Search, SearchContext
from django.shortcuts import render
@method_decorator(login_required, name='dispatch')
class DefaultSearch(TemplateView, DALMEContextMixin):
    """Authenticated search page: renders the form and handles submissions.

    The last POSTed search is stored in the session and replayed on a later
    GET, so results persist across navigation.
    """

    template_name = 'dalme_app/search.html'
    breadcrumb = [('Search', ''), ('Search', '')]
    page_title = 'Search'
    search_context = SearchContext(public=False)
    search_formset = formset_factory(SearchForm)

    def dispatch(self, request, *args, **kwargs):
        # Replay a previously saved search: promote the stored payload to a
        # POST so the normal post() handler runs instead of the GET path.
        if not request.method == 'POST' and request.session.get('search-post', False):
            request.POST = request.session['search-post']
            request.method = 'POST'
        if request.method.lower() in self.http_method_names:
            handler = getattr(self, request.method.lower(), self.http_method_not_allowed)
        else:
            handler = self.http_method_not_allowed
        return handler(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        """Return the base (empty-search) template context."""
        context = super().get_context_data(**kwargs)
        context.update({
            'query': False,
            'advanced': False,
            'form': self.search_formset(form_kwargs={'fields': self.search_context.fields}),
            'results': [],
            'paginator': {},
            'errors': False,
            'paginated': False,
            'suggestion': None,
            'search': True,
            'search_context': self.search_context.context,
        })
        return context

    def post(self, request, **kwargs):
        """Validate the formset, run the search and render the results."""
        formset = self.search_formset(request.POST, form_kwargs={'fields': self.search_context.fields})
        # Remember the raw POST so dispatch() can replay it on a later GET.
        request.session['search-post'] = request.POST
        context = self.get_context_data(**kwargs)
        if formset.is_valid():
            search_obj = Search(
                data=formset.cleaned_data,
                page=request.POST.get('form-PAGE', 1),
                highlight=True,
                search_context=self.search_context.context,
            )
            context.update({
                'query': True,
                # a filled field_value on the first form marks an advanced search
                'advanced': formset.cleaned_data[0].get('field_value', '') != '',
                'form': formset,
                'results': search_obj.results,
                'paginator': search_obj.paginator,
                'errors': search_obj.errors,
                'paginated': search_obj.paginator.get('num_pages', 0) > 1,
            })
        return render(request, self.template_name, context)
|
import * as React from 'react';
import NavigationTestUtils from 'react-navigation/NavigationTestUtils';
import renderer from 'react-test-renderer';
import App from '../App';
// Replace native Expo / navigation modules with string stubs so the tree
// can be rendered without a device environment.
jest.mock('expo', () => ({
AppLoading: 'AppLoading',
}));
jest.mock('../navigation/AppNavigator', () => 'AppNavigator');
describe('App', () => {
// Fake timers keep the async loading phase deterministic in snapshots.
jest.useFakeTimers();
beforeEach(() => {
NavigationTestUtils.resetInternalState();
});
it('renders the loading screen', () => {
const tree = renderer.create(<App />).toJSON();
expect(tree).toMatchSnapshot();
});
it('renders the root without loading screen', () => {
const tree = renderer.create(<App skipLoadingScreen />).toJSON();
expect(tree).toMatchSnapshot();
});
});
|
// JSYG.Alignment: aligns a collection of SVG/HTML elements sharing the same
// parent (left/center/right/top/middle/bottom) relative to the collection's
// global bounding box. Geometry is computed in the parent's coordinate
// system and mapped back through each element's inverse transform matrix.
(function() {
"use strict";
JSYG.Alignment = function(arg) {
this.list = arg;
};
JSYG.Alignment.prototype = new JSYG.StdConstruct();
// Event hooks, one per alignment operation (null = no handler installed).
JSYG.Alignment.prototype.onalign = null;
JSYG.Alignment.prototype.onalignleft = null;
JSYG.Alignment.prototype.onaligncenter = null;
JSYG.Alignment.prototype.onalignright = null;
JSYG.Alignment.prototype.onaligntop = null;
JSYG.Alignment.prototype.onalignmiddle = null;
JSYG.Alignment.prototype.onalignbottom = null;
// The collection of elements to align (all must share one parent node).
JSYG.Alignment.prototype.list = null;
// Compute the bounding box enclosing every element of the collection,
// expressed in the shared parent's coordinate system.
JSYG.Alignment.prototype.getGlobalDim = function() {
var globalDim = {
left:Infinity,
top:Infinity,
bottom:-Infinity,
right:-Infinity
};
var list = new JSYG(this.list);
var parent = list[0].parentNode;
list.each(function() {
if (this[0].parentNode != parent) throw new Error("Les éléments de la collection doivent partager le m�me parent");
var dim = this.getDim(parent);
// Expand the running box with this element's extent.
if (dim.x < globalDim.left) globalDim.left = dim.x;
if (dim.y < globalDim.top) globalDim.top = dim.y;
if (dim.x + dim.width > globalDim.right) globalDim.right = dim.x + dim.width;
if (dim.y + dim.height > globalDim.bottom) globalDim.bottom = dim.y + dim.height;
},true);
return {
x : globalDim.left,
y : globalDim.top,
width : globalDim.right - globalDim.left,
height : globalDim.bottom - globalDim.top,
};
};
// Center point of the global bounding box.
JSYG.Alignment.prototype.getCenter = function() {
var globalDim = this.getGlobalDim();
return new JSYG.Vect( globalDim.x+globalDim.width/2, globalDim.y+globalDim.height/2 );
};
// Flush every element against the left edge of the global box.
JSYG.Alignment.prototype.alignLeft = function() {
var globalDim = this.getGlobalDim(),
left = globalDim.x,
list = new JSYG(this.list),
parent = list[0].parentNode;
list.setDim({x:left,from:parent});
this.trigger("align");
this.trigger("alignleft",null,left);
return this;
};
// Center elements horizontally on the global box's vertical axis.
JSYG.Alignment.prototype.alignCenter = function() {
var center = this.getCenter(),
list = new JSYG(this.list),
parent = list[0].parentNode;
list.each(function() {
// Map the target x through the element's inverse matrix so the shift
// is applied in the element's own (possibly transformed) coordinates.
var mtx = this.getMtx().inverse(),
dim = this.getDim(),
dimP = this.getDim(parent),
pt1 = new JSYG.Vect(dimP.x+dimP.width/2,0).mtx(mtx),
pt2 = new JSYG.Vect(center.x,0).mtx(mtx);
this.setDim({
x : dim.x + pt2.x - pt1.x,
y : dim.y + pt2.y - pt1.y
});
},true);
this.trigger("align");
this.trigger("aligncenter",null,center.x);
return this;
};
// Flush every element against the right edge of the global box.
JSYG.Alignment.prototype.alignRight = function() {
var globalDim = this.getGlobalDim(),
right = globalDim.x + globalDim.width,
list = new JSYG(this.list),
parent = list[0].parentNode;
list.each(function() {
var mtx = this.getMtx().inverse(),
dim = this.getDim(),
dimP = this.getDim(parent),
pt1 = new JSYG.Vect(dimP.x,0).mtx(mtx),
pt2 = new JSYG.Vect(right - dimP.width,0).mtx(mtx);
this.setDim({
x : dim.x + pt2.x - pt1.x,
y : dim.y + pt2.y - pt1.y
});
},true);
this.trigger("align");
this.trigger("alignright",null,right);
return this;
};
// Flush every element against the top edge of the global box.
JSYG.Alignment.prototype.alignTop = function() {
var top = this.getGlobalDim().y,
list = new JSYG(this.list),
parent = list[0].parentNode;
list.each(function() {
var mtx = this.getMtx().inverse(),
dim = this.getDim(),
dimP = this.getDim(parent),
pt1 = new JSYG.Vect(0,dimP.y).mtx(mtx),
pt2 = new JSYG.Vect(0,top).mtx(mtx);
this.setDim({
x : dim.x + pt2.x - pt1.x,
y : dim.y + pt2.y - pt1.y
});
},true);
this.trigger("align");
this.trigger("aligntop",null,top);
return this;
};
// Center elements vertically on the global box's horizontal axis.
JSYG.Alignment.prototype.alignMiddle = function() {
var center = this.getCenter(),
list = new JSYG(this.list),
parent = list[0].parentNode;
list.each(function() {
var mtx = this.getMtx().inverse(),
dim = this.getDim(),
dimP = this.getDim(parent),
pt1 = new JSYG.Vect(0,dimP.y+dimP.height/2).mtx(mtx),
pt2 = new JSYG.Vect(0,center.y).mtx(mtx);
this.setDim({
x : dim.x + pt2.x - pt1.x,
y : dim.y + pt2.y - pt1.y
});
},true);
this.trigger("align");
this.trigger("alignmiddle",null,center.y);
return this;
};
// Flush every element against the bottom edge of the global box.
JSYG.Alignment.prototype.alignBottom = function() {
var dim = this.getGlobalDim(),
bottom = dim.y + dim.height,
list = new JSYG(this.list),
parent = list[0].parentNode;
list.each(function() {
var mtx = this.getMtx().inverse(),
dim = this.getDim(),
dimP = this.getDim(parent),
pt1 = new JSYG.Vect(0,dimP.y).mtx(mtx),
pt2 = new JSYG.Vect(0,bottom-dimP.height).mtx(mtx);
this.setDim({
x : dim.x + pt2.x - pt1.x,
y : dim.y + pt2.y - pt1.y
});
},true);
this.trigger("align");
this.trigger("alignbottom",null,bottom);
return this;
};
var aligns = ['top','bottom','left','right','center','middle'];
// Public entry point: dispatch "top"/"bottom"/... to the matching method.
JSYG.prototype.align = function(alignment) {
if (aligns.indexOf(alignment.toLowerCase()) == -1) { throw new Error(alignment+" : argument incorrect ("+aligns.join()+" requis)"); }
var method = "align" + alignment.charAt(0).toUpperCase() + alignment.substr(1);
new JSYG.Alignment(this)[method]();
};
})();
|
# Copyright (c) Facebook, Inc. and its affiliates.
import numpy as np
from typing import List, Optional, Tuple
import torch
from detectron2.data.detection_utils import read_image
from ..structures import DensePoseChartResult
from .base import Boxes, Image
from .densepose_results import DensePoseResultsVisualizer
def get_texture_atlas(path: Optional[str]) -> Optional[np.ndarray]:
    """Load a texture atlas from ``path`` and return it in RGB(A) channel order.

    Returns ``None`` when no path is given.  Only the first three channels are
    swapped (BGR -> RGB); an alpha channel, if present, is left untouched.
    """
    if path is None:
        return None
    # Reading images like that downsamples 16-bit images to 8-bit.
    # If 16-bit images are needed, this could be replaced by cv2.imread with
    # the cv2.IMREAD_UNCHANGED flag (with cv2 we also need it to keep alpha
    # channels); the rest of the pipeline would need adapting to 16-bit too.
    bgr_image = read_image(path)
    rgb_image = np.copy(bgr_image)
    # Reverse just the color channels, sourcing from the untouched original.
    rgb_image[..., :3] = bgr_image[..., 2::-1]
    return rgb_image
class DensePoseResultsVisualizerWithTexture(DensePoseResultsVisualizer):
    """
    texture_atlas: An image, size 6N * 4N, with N * N squares for each of the 24 body parts.
    It must follow the grid found at https://github.com/facebookresearch/DensePose/blob/master/DensePoseData/demo_data/texture_atlas_200.png # noqa
    For each body part, U is proportional to the features coordinate, and (1 - V) to y
    """
    def __init__(self, texture_atlas, **kwargs):
        # The atlas is a 6x4 grid of N*N squares, so N is derived from the
        # height; the assert checks the width is consistent (4 * N).
        self.texture_atlas = texture_atlas
        self.body_part_size = texture_atlas.shape[0] // 6
        assert self.body_part_size == texture_atlas.shape[1] // 4
    def visualize(
        self,
        image_bgr: Image,
        results_and_boxes_xywh: Tuple[Optional[List[DensePoseChartResult]], Optional[Boxes]],
    ) -> Image:
        """Overlay the texture atlas onto every detection in ``image_bgr``.

        The image is modified in place inside each detection's bounding box
        and is also returned.  If either the DensePose results or the boxes
        are missing, the image is returned unchanged.
        """
        densepose_result, boxes_xywh = results_and_boxes_xywh
        if densepose_result is None or boxes_xywh is None:
            return image_bgr
        boxes_xywh = boxes_xywh.int().cpu().numpy()
        texture_image, alpha = self.get_texture()
        for i, result in enumerate(densepose_result):
            # Stack part labels I with the chart coordinates U, V (clamped to
            # [0, 1]) into a single 3-channel array for this detection.
            iuv_array = torch.cat((result.labels[None], result.uv.clamp(0, 1)))
            x, y, w, h = boxes_xywh[i]
            bbox_image = image_bgr[y : y + h, x : x + w]
            image_bgr[y : y + h, x : x + w] = self.generate_image_with_texture(
                texture_image, alpha, bbox_image, iuv_array.cpu().numpy()
            )
        return image_bgr
    def get_texture(self):
        """Split the atlas into per-part textures and an alpha mask.

        Returns ``(texture_image, alpha)`` where ``texture_image`` has shape
        (24, N, N, 3) and ``alpha`` has shape (24, N, N): the normalized
        alpha channel when the atlas has 4 channels, otherwise a boolean mask
        marking pixels with any non-zero color.
        """
        N = self.body_part_size
        texture_image = np.zeros([24, N, N, self.texture_atlas.shape[-1]])
        for i in range(4):
            for j in range(6):
                # Part index 6*i + j maps to grid row j, grid column i of
                # the atlas (column-major walk over the 6x4 grid).
                texture_image[(6 * i + j), :, :, :] = self.texture_atlas[
                    N * j : N * (j + 1), N * i : N * (i + 1), :
                ]
        if texture_image.shape[-1] == 4:  # Image with alpha channel
            alpha = texture_image[:, :, :, -1] / 255.0
            texture_image = texture_image[:, :, :, :3]
        else:
            # No alpha channel: treat every non-black pixel as fully opaque.
            alpha = texture_image.sum(axis=-1) > 0
        return texture_image, alpha
    def generate_image_with_texture(self, texture_image, alpha, bbox_image_bgr, iuv_array):
        """Alpha-blend per-part textures onto one bounding-box crop.

        ``iuv_array`` is (3, H, W): part labels I plus chart coordinates U, V
        in [0, 1].  For each body part, (U, 1 - V) is scaled to texel indices
        into that part's N*N texture and blended over the crop.  Returns a
        uint8 image the same size as ``bbox_image_bgr``.
        """
        I, U, V = iuv_array
        generated_image_bgr = bbox_image_bgr.copy()
        for PartInd in range(1, 25):
            # Coordinates of the crop pixels labeled with this body part.
            x, y = np.where(I == PartInd)
            x_index = (U[x, y] * (self.body_part_size - 1)).astype(int)
            y_index = ((1 - V[x, y]) * (self.body_part_size - 1)).astype(int)
            part_alpha = np.expand_dims(alpha[PartInd - 1, y_index, x_index], -1)
            generated_image_bgr[I == PartInd] = (
                generated_image_bgr[I == PartInd] * (1 - part_alpha)
                + texture_image[PartInd - 1, y_index, x_index] * part_alpha
            )
        return generated_image_bgr.astype(np.uint8)
|
from bika.lims.browser.sample import SamplesView as _SV
from bika.lims.permissions import *
from Products.CMFCore.utils import getToolByName
from zope.interface import implements
from Products.CMFPlone.utils import safe_unicode
import plone
class SamplesView(_SV):
    """Batch-specific samples listing.

    Restricts the stock SamplesView to samples that have at least one
    AnalysisRequest belonging to the current batch (the view context).
    """

    def __init__(self, context, request):
        super(SamplesView, self).__init__(context, request)
        self.view_url = self.context.absolute_url() + "/samples"
        # Samples relate to a batch through their AnalysisRequests, not by
        # containment, so a physical-path filter would wrongly exclude them.
        if 'path' in self.contentFilter:
            del self.contentFilter['path']

    def contentsMethod(self, contentFilter):
        """Return the sample objects with at least one AR in this batch.

        Merges the active review state's filter into self.contentFilter,
        queries the catalog, and keeps only samples linked to this batch,
        de-duplicated by sample id.
        """
        tool = getToolByName(self.context, self.catalog)
        state = [x for x in self.review_states
                 if x['id'] == self.review_state['id']][0]
        self.contentFilter.update(state['contentFilter'])
        tool_samples = tool(contentFilter)
        samples = {}
        for sample in (p.getObject() for p in tool_samples):
            for ar in sample.getAnalysisRequests():
                batch = ar.getBatch()
                # Reuse the batch already fetched above instead of calling
                # ar.getBatch() a second time (it is a reference lookup).
                if batch and batch.UID() == self.context.UID():
                    samples[sample.getId()] = sample
        return samples.values()
|
// Re-export the standard near-sdk-as as-pect configuration, narrowed to this
// project's spec files.
const config = require('near-sdk-as/imports');
config.include = ["*/assembly/__tests__/**/*.spec.ts"];
module.exports = config;
|
strings = [
("no_string", "NO STRING!"),
("empty_string", " "),
("yes", "Yes."),
("no", "No."),
# Strings before this point are hardwired.
("credits_0", "Persistent World Module^Copyright 2010 Steven Schwartfeger (Vornne)"),
("credits_1", "Mount&Blade: Warband Copyright 2008-2014 Taleworlds Entertainment"),
("credits_2", "Game design:^Armagan Yavuz^Steve Negus^Cem Cimenbicer"),
("credits_3", "Programming:^Armagan Yavuz^Cem Cimenbicer^Serdar Kocdemir^Ozan Gumus^Mustafa Korkmaz^^Additional Programming:^Gokhan Uras^M. Furkan Yilmaz"),
("credits_4", "CG Artists:^Ozgur Saral^Mustafa Ozturk^Pinar Cekic^Ozan Unlu^Yigit Savtur^Umit Singil"),
("credits_5", "Concept Artist:^Ganbat Badamkhand"),
("credits_6", "Writing:^Steve Negus^Armagan Yavuz^Ryan A. Span"),
("credits_7", "Original Music:^Jesse Hopkins"),
("credits_8", "Voice Talent:^Tassilo Egloffstein"),
("credits_9", "Tutorial written by:^Steve Negus^Armagan Yavuz^Edward Spoerl^^^\
Horse Motion Capture Animation Supplied by:^Richard Widgery & Kinetic Impulse^^^\
Physics:^Havok^^^\
Sound and Music Program Library:^FMODex Sound System by Firelight Technologies^^^\
Skybox Textures:^Jay Weston^^^\
Chinese Translation:^Hetairoi; Gaodatailang; silentjealousy; Ginn; fallout13; James; D.Kaede; Kan2; alixyang; muyiboy^^^\
TaleWorlds Director of Communications:^Ali Erkin ^^^\
TaleWorlds Forum Programming:^Brett Flannigan ^^^\
TaleWorlds.com Forum Administrators and Moderators:^\
Janus^\
Archonsod^\
Narcissus^\
Nairagorn^\
Lost Lamb^\
Deus Ex^\
Merentha^\
Volkier^\
Instag0^\
Ativan^\
ego^\
Guspav^\
Hallequin^\
Invictus^\
okiN^\
Raz^\
rejenorst^\
Skyrage^\
ThVaz^^^\
Mount&Blade Community Suggestions and Feedback:^\
A_Mustang^\
adamlug^\
Adorno^\
alden^\
Alhanalem^\
amade^\
Anthallas^\
Alkhadias Master^\
Arch3r^\
Archevious^\
Arcas Nebun^\
Arcon^\
Arcturus^\
ares007^\
Arjihad^\
BadabombadaBang^\
Badun^\
BaronAsh^\
Berserker Pride^\
bgfan^\
bierdopjeee^\
Big_Mac^\
Binboy^\
blink180heights^\
BlodsHammar^\
Bloid^\
Brandon^\
Brego^\
chenjielian^\
cifre^\
COGlory^\
Corinthian Hoplite^\
Crazed Rabbit^\
CryptoCactus^\
CtrlAltDe1337^\
Cuther^\
Da-V-Man^\
dimitrischris^\
dstemmer^\
EasyCo506^\
Egbert^\
ethneldryt^\
eudaimondaimon^\
Faranox^\
Fawzia dokhtar-i-Sanjar^\
Fei Dao^\
Gabeed^\
GeN76^\
General_Hospital^\
GhosTR^\
glustrod^\
Gubbo^\
guspav^\
Halcyon^\
Harn^\
Hethwill^\
Highelfwarrior^\
HULKSMASH^\
Iberon^\
ignoble^\
Jack_Merchantson^\
JoG^\
Jov^\
Kazzan^\
King Jonathan the Great^\
Kleidophoros^\
knight^\
Kong Burger^\
Kristiania^\
l3asu^\
Larkraxm^\
Leandro1021DX^\
lighthaze^\
Llew2^\
Lord Rich^\
lordum_ediz^\
Lucke189^\
Mabons^\
MacPharlan^\
Madnes5^\
MagicMaster^\
Makh^\
ManiK^\
Manitas^\
Marin Peace Bringer^\
Martinet^\
MAXHARDMAN^\
Merlkir^\
miguel8500^\
Mithras^\
Moddan^\
Nate^\
Nemeo^\
Nite/m4re^\
noobalicous^\
Nord Champion^\
okiN^\
Orion^\
OTuphlos^\
Papa Lazarou^\
Phallas^\
Plazek^\
Prcin^\
PSYCHO78^\
PsykoOps^\
Reapy^\
Red River^\
Rhizobium^\
Riggea^\
Rongar^\
Ros^\
sadnhappy^\
Sarejo^\
ScientiaExcelsa^\
Scorch!^\
Seawied86^\
sebal87^\
shikamaru 1993^\
Shun^\
silentdawn^\
Sir Gowe^\
Skyrage^\
Slawomir of Aaarrghh^\
SoloSebo^\
SovietSoldier^\
Stabbing Hobo^\
Stratigos001^\
Styo^\
TalonAquila^\
test^\
The Yogi^\
Thundertrod^\
Thyr^\
Tim^\
Titanshoe^\
tmos^\
Toffey^\
Tonttu^\
Trenalok^\
Tronde^\
UberWiggett^\
Urist^\
Ursca^\
urtzi^\
Vermin^\
Viajero^\
Vincenzo^\
Vulkan^\
Warcat92^\
Welcome_To_Hell^\
Wheem^\
Wu-long^\
Yellonet^\
Yobbo^\
Yoshi Murasaki^\
Yoshiboy^\
Zyconnic^^^\
Special Thanks to Toby Lee for his ideas and in depth feedback on the combat system.^\
...and many many other wonderful Mount&Blade players!^^\
(This is only a small sample of all the players who have contributed to the game by providing suggestions and feedback.^\
This list has been compiled by sampling only a few threads in the Taleworlds Forums.^\
Unfortunately compiling an exhaustive list is almost impossible.^\
We apologize sincerely if you contributed your suggestions and feedback but were not listed here, and please know that we are grateful to you all the same...)\
"),
("credits_10", "Paradox Interactive^^President and CEO:^Theodore Bergqvist^^Executive Vice President:^Fredrik Wester\
^^Chief Financial Officer:^Lena Eriksson^^Finance & Accounting:^Annlouise Larsson^^VP Sales & Marketing US:^Reena M. Miranda\
^^VP Sales & Marketing EU:^Martin Sirc^^Distribution Manager Nordic:^Erik Helmfridsson^^Director of PR & Marketing:^Susana Meza\
^^PR & Marketing:^Sofia Forsgren^^Product Manager:^Boel Bermann\
"),
("credits_11", "Logotype:^Jason Brown^^Cover Art:^Piotr Fox Wysocki\
^^Layout:^Christian Sabe^Melina Grundel^^Poster:^Piotr Fox Wysocki^^Map & Concept Art:^Ganbat Badamkhand\
^^Manual Editing:^Digital Wordsmithing: Ryan Newman, Nick Stewart^^Web:^Martin Ericsson^^Marketing Assets:^2Coats\
^^Localization:^S&H Entertainment Localization^^GamersGate:^Ulf Hedblom^Andreas Pousette^Martin Ericson^Christoffer Lindberg\
"),
("credits_12", "Thanks to all of our partners worldwide, in particular long-term partners:\
^Koch Media (Germany & UK)^Blue Label (Italy & France)^Friendware (Spain)^New Era Interactive Media Co. Ltd. (Asia)\
^Snowball (Russia)^Pinnacle (UK)^Porto Editora (Portugal)^Hell-Tech (Greece)^CD Projekt (Poland, Czech Republic, Slovakia & Hungary)\
^Paradox Scandinavian Distribution (Scandinavia)\
"),
("profile_banner_selection_text", "Choose a banner for your profile:"),
("use_default_banner", "Use Faction's Banner"),
("game_type_1", "Conquest"),
("game_type_2", "Quick Battle"),
("game_type_3", "No Money"),
("game_type_4", "Feudalism"),
("game_type_5", "Permanent Death"),
("game_types_end", "game_types_end"),
("fac_1_default_name", "Red Faction"),
("fac_2_default_name", "White Faction"),
("fac_3_default_name", "Blue Faction"),
("fac_4_default_name", "Green Faction"),
("fac_5_default_name", "Yellow Faction"),
("fac_6_default_name", "Purple Faction"),
("fac_7_default_name", "Orange Faction"),
("fac_8_default_name", "Black Faction"),
("leave_edit_mode", "Leave scene editing mode?"),
("distance_reg1_sq_distance_reg2", "Distance: {reg1}, sq distance: {reg2}."),
("choose_faction_banner", "Choose the banner for your faction:"),
("keep_current_banner", "Keep the current banner"),
("next", "Next"),
("previous", "Previous"),
("spectate", "Spectate"),
("join_game", "Join Game"),
("choose_an_option", "Choose an option:"),
("choose_an_option_targeting_s1", "Choose an option, targeting: {s1}"),
("change_options", "Change Options"),
("change_controls", "Change Controls"),
("show_rules", "Show Game Rules"),
("show_info", "Show Game Information"),
("admin_panel", "Administrator Panel"),
("admin_tools", "Administrator Tools"),
("admin_items", "Administrator Items"),
("kick_player", "Kick a player"),
("ban_player_temp", "Ban a player temporarily"),
("ban_player_perm", "Ban a player permanently"),
("mute_player", "Mute / unmute a player"),
("kill_player", "Kill a player"),
("fade_player_out", "Fade a player out"),
("freeze_player", "Freeze / thaw a player"),
("teleport_to_player", "Teleport to a player"),
("teleport_behind_player", "Teleport behind a player"),
("teleport_forwards", "Teleport forwards"),
("equip_admin_armor", "Equip admin armor"),
("become_invisible", "Become invisible"),
("refill_health", "Refill health"),
("become_godlike", "Become godlike"),
("spawn_admin_horse", "Spawn admin horse"),
("remove_admin_horses", "Remove admin horses"),
("remove_stray_horses", "Remove stray horses"),
("teleport_to_ships", "Teleport to ships"),
("reset_sunken_ships", "Reset sunken ships"),
("lock_current_faction", "Lock current faction"),
("unlock_current_faction", "Unlock current faction"),
("quit", "Quit"),
("choose_a_player_to_s0", "Choose a player to {s0}:"),
("kick", "kick"),
("ban_temp", "ban temporarily"),
("ban_perm", "ban permanently"),
("mute", "mute / unmute"),
("kill", "kill"),
("fade_out", "fade out"),
("freeze", "freeze / thaw"),
("teleport_to", "teleport to"),
("teleport_behind", "teleport behind"),
("propose_as_lord", "propose as lord"),
("send_message_to", "send the message to"),
("give_take_door_key", "give / take away door key"),
("give_take_money_chest_key", "give / take away money chest key"),
("give_take_item_chest_key", "give / take away item chest key"),
("allow_disallow_announcing", "allow / disallow announcing for"),
("outlaw", "outlaw"),
("request_poll", "Request Poll"),
("choose_a_poll_type", "Choose a poll type:"),
("s0__reg0_", "{!}{s0} ({reg0})"),
("choose_poll_scene", "Change the scene"),
("choose_poll_kick", "Kick a player"),
("choose_poll_ban", "Ban a player"),
("choose_poll_faction_lord", "Change your faction's lord"),
("choose_a_scene", "Choose a scene:"),
("faction_admin", "Faction Administration"),
("change_faction_banner", "Change faction banner"),
("change_faction_name", "Change faction name"),
("kick_player_from_faction", "Kick player from faction"),
("outlaw_player", "Outlaw player"),
("manage_door_keys", "Manage door keys"),
("manage_money_chest_keys", "Manage money chest keys"),
("manage_item_chest_keys", "Manage item chest keys"),
("manage_announcers", "Manage announcers"),
("choose_a_faction", "Choose a faction:"),
("declare_faction_hostile", "Declare a faction hostile"),
("offer_faction_peace", "Offer a faction peace"),
("log_admin_target_player", "Admin {s0} ({reg0}) {s3} player {s1} ({reg1})."),
("log_admin_target_self", "Admin {s0} ({reg0}) {s3}."),
("log_admin_kick", "kicked"),
("log_admin_ban_temp", "temporarily banned"),
("log_admin_ban_perm", "permanently banned"),
("log_admin_mute", "muted / unmuted"),
("log_admin_kill", "killed"),
("log_admin_fade_out", "faded out"),
("log_admin_freeze", "froze / thawed"),
("log_admin_teleport_to", "teleported to"),
("log_admin_teleport_behind", "teleported behind"),
("log_admin_teleport_forwards", "teleported forwards"),
("log_admin_get_armor", "equipped admin armor"),
("log_admin_get_invisible", "became invisible"),
("log_admin_refill_health", "refilled health"),
("log_admin_become_godlike", "became godlike"),
("log_admin_get_horse", "spawned an admin horse"),
("log_admin_remove_horses", "removed admin horses"),
("log_admin_remove_stray_horses", "removed stray horses"),
("log_admin_teleport_to_ships", "teleported to the next ship"),
("log_admin_reset_ships", "reset sunken ships"),
("log_admin_lock_faction", "{s4}locked faction {s5}"),
("log_admin_cheat_money", "took {reg0} gold"),
("log_admin_cheat_item", "took item '{s4}'"),
("poll_change_scene", "Change scene to {s1}"),
("poll_kick_player", "Kick player {s1}"),
("poll_ban_player", "Ban player {s1}"),
("poll_faction_lord", "Make {s1} the lord of {s2}"),
("poll_log", "Player {s3} started a poll: {s0}"),
("poll_existing", "Another poll is already in progress."),
("poll_invalid", "Invalid poll."),
("poll_result_no", "The poll was rejected."),
("poll_result_yes", "The poll was accepted."),
("poll_result_admin_no", "The poll was rejected by an administrator."),
("poll_result_admin_yes", "The poll was accepted by an administrator."),
("poll_requester_keys", "Called by: {s1} - yes: F9, no: F8, abstain: F7"),
("poll_time_left", "({reg0} seconds left)"),
("departed_player", "<departed player>"),
("set_s0", "Set {s0}"),
("s0_reg1", "{!}{s0} {reg1}"),
("invalid_respawn_period", "Invalid respawn period."),
("invalid_max_players", "Invalid maximum players value."),
("invalid_scene", "Invalid scene."),
("command_not_implemented", "Command is not implemented."),
("bot_count", "Random testing bots:"),
("round_max_seconds", "Seconds before removing spawned items:"),
("respawn_period", "Respawn period (seconds):"),
("num_bots_voteable", "Maximum herd animals:"),
("scenes_voteable", "Allow polls to change scene:"),
("factions_voteable", "factions_voteable:"),
("player_respawn_as_bot", "player_respawn_as_bot:"),
("kick_voteable", "Allow polls to kick players:"),
("ban_voteable", "Allow polls to ban players:"),
("valid_vote_ratio", "Poll accept threshold (%):"),
("auto_team_balance_limit", "auto_team_balance_limit:"),
("starting_gold", "Starting gold (%):"),
("combat_gold_bonus", "Combat gold bonus (%):"),
("round_gold_bonus", "round_gold_bonus:"),
("player_banners_allowed", "player_banners_allowed:"),
("force_default_armor", "Weather:"),
("team_points_gained_for_flags", "team_points_gained_for_flags:"),
("points_gained_for_capturing_flags", "points_gained_for_capturing_flags:"),
("game_time_limit", "Game time limit (minutes):"),
("team_point_limit", "Victory condition time (minutes):"),
("defender_spawn_count", "defender_spawn_count:"),
("disallow_ranged_weapons", "Admin gold disabled:"),
("use_class_limits", "use_class_limits"),
("class_limit_player_count", "class_limit_player_count"),
("squad_size", "squad_size"),
("scale_squad", "Initial stockpile multiplier:"),
("build_points_team1", "build_points_team1"),
("build_points_team2", "build_points_team2"),
("allow_multiple_firearms", "allow_multiple_firearms"),
("enable_bonuses", "enable_bonuses"),
("bonus_strength", "bonus_strength"),
("bonus_range", "bonus_range"),
("fall_off_horse", "fall_off_horse"),
("horse_dying", "horse_dying"),
("auto_kick", "auto_kick"),
("max_teamkills_before_kick", "max_teamkills_before_kick"),
("auto_horse", "auto_horse"),
("auto_swap", "auto_swap"),
("limit_grenadier", "limit_grenadier"),
("limit_skirmisher", "limit_skirmisher"),
("limit_rifle", "limit_rifle"),
("limit_cavalry", "limit_cavalry"),
("limit_lancer", "limit_lancer"),
("limit_hussar", "limit_hussar"),
("limit_dragoon", "limit_dragoon"),
("limit_cuirassier", "limit_cuirassier"),
("limit_heavycav", "limit_heavycav"),
("limit_artillery", "limit_artillery"),
("limit_rocket", "limit_rocket"),
("limit_sapper", "limit_sapper"),
("limit_musician", "limit_musician"),
("limit_sergeant", "limit_sergeant"),
("limit_officer", "limit_officer"),
("limit_general", "limit_general"),
("max_players", "Maximum number of players:"),
("friendly_fire", "friendly_fire:"),
("melee_friendly_fire", "melee_friendly_fire:"),
("friendly_fire_damage_self_ratio", "friendly_fire_damage_self_ratio:"),
("friendly_fire_damage_friend_ratio", "friendly_fire_damage_friend_ratio:"),
("ghost_mode", "Spectator camera:"),
("control_block_direction", "Control block direction:"),
("combat_speed", "Combat speed:"),
("add_to_game_servers_list", "Add to official game servers list:"),
("anti_cheat", "Enable Valve Anti-cheat (Requires valid Steam account):"),
("combat_speed_0", "Slowest"),
("combat_speed_1", "Slower"),
("combat_speed_2", "Medium"),
("combat_speed_3", "Faster"),
("combat_speed_4", "Fastest"),
("automatic", "Automatic"),
("by_mouse_movement", "By mouse movement"),
("free", "Free"),
("stick_to_any_player", "Lock to any player"),
("stick_to_team_members", "Lock to team members"),
("stick_to_team_members_view", "Lock to team members' view"),
("game_rules", "Game rules:^"),
("s0_reg0", "{!}{s0} {reg0}"),
("s0_s1", "{!}{s0} {s1}"),
("s2_s3", "{!}{s2}^{s3}"),
("s0__s1", "{!}{s0}, {s1}"),
("scene_name", "Scene name:"),
("server_name", "Server name:"),
("game_password", "Game password:"),
("welcome_message", "Welcome message:"),
("game_type", "Game type:"),
("scene", "Scene:"),
("start_scene", "Start scene"),
("edit_scene", "Edit scene"),
("zero", "0"),
("remaining_time_reg0_s0reg1_s1reg2", "Remaining time: {reg0}:{s0}{reg1}:{s1}{reg2}"),
("respawning_in_reg0_seconds", "Respawning in {reg0} seconds"),
("press_select_spawn_point", "Press 1-5 to select spawn point"),
("press_select_spawn_area", "Press ~ to change: {s1}"),
("restart_as_peasant_commoner", "restart as a peasant commoner"),
("number_of_factions_reg0", "Number of factions: {reg0}"),
("victory_condition_none", "No victory condition"),
("victory_condition_castles", "Victory: hold all castles for {reg0} minutes"),
("respawn_with_partial_health", "Respawn with partial health"),
("respawn_with_full_health", "Respawn with full health"),
("herd_animal_limit_reg0", "Herd animal limit: {reg0}"),
("always_fine", "Always fine"),
("always_raining", "Always raining"),
("dynamic", "Dynamic"),
("player_name", "Player Name"),
("class", "Class"),
("kills", "Kills"),
("deaths", "Deaths"),
("ping", "Ping"),
("score", "Score"),
("score_reg0", "Score: {reg0}"),
("all_players", "All Players"),
("reg0_players", "{reg0} players"),
("reg0_player", "{reg0} player"),
("reg0_players_of_reg1", "{reg0} players of {reg1}"),
("reg0_player_of_reg1", "{reg0} player of {reg1}"),
("reg0", "{!}{reg0}"),
("s0", "{!}{s0}"),
("enable_s0", "Enable {s0}"),
("disable_s0", "Disable {s0}"),
("un", "un"),
("toggle_name_labels", "Toggle name labels"),
("toggle_faction_in_name_labels", "Toggle faction in name labels"),
("toggle_chat_overlay", "Toggle chat overlay"),
("toggle_local_faction_chat", "Toggle local or faction chat"),
("attach_cart_pack", "Attach nearby cart / pack"),
("detach_cart_pack", "Detach cart / pack"),
("toggle_head", "Toggle head armor"),
("discard_body", "Discard body armor"),
("discard_foot", "Discard foot armor"),
("toggle_hand", "Toggle hand armor"),
("reveal_money_pouch_to_target", "Reveal money pouch to target"),
("toggle_clickable_animation_menu", "Toggle clickable animation menu"),
("toggle_muting_global_chat", "Toggle muting global chat"),
("action_menu_end", "action_menu_end"),
("s0_are_you_sure", "{s0} - are you sure?"),
("confirmation", "confirmation"),
("display_name_labels", "Display name labels"),
("hide_faction_in_name_labels", "Hide faction in name labels"),
("display_chat_overlay", "Display chat overlay"),
("overlay_shows_faction_chat", "Overlay shows faction chat instead of local"),
("disable_automatic_shadow_recalculation", "Disable automatic shadow recalculation"),
("mute_global_chat", "Mute global chat"),
("non_clickable_animation_menu", "Non-clickable animation menu"),
("disable_rain_snow_particles", "Disable rain and snow particles"),
("reg0__s0", "{!}{reg0}. {s0}"),
("menu_guestures", "Guestures"),
("menu_neutral", "Neutral"),
("menu_hostile", "Hostile"),
("menu_robbery", "Robbery"),
("anim_cheer", "Cheer"),
("anim_clap", "Clap"),
("anim_raise_sword", "Raise sword"),
("anim_hands_on_hips", "Hands on hips"),
("anim_arms_crossed", "Arms crossed"),
("anim_stand_still", "Stand still"),
("anim_away_vile_beggar", "Away with you, vile beggar"),
("anim_my_lord", "My lord"),
("anim_almost_harvesting_season", "It's almost harvesting season"),
("anim_whats_this_then", "What's this then, eh?"),
("anim_out_for_a_stroll_are_we", "Out for a stroll, are we?"),
("anim_we_ride_to_war", "We ride to war"),
("anim_less_talking_more_raiding", "Less talking, more raiding"),
("anim_you_there_stop", "You there, stop!"),
("anim_war_cry", "War cry"),
("anim_tear_you_limb_from_limb", "I'll tear you limb from limb"),
("anim_better_not_be_a_manhunter", "You better not be a manhunter"),
("anim_drink_from_your_skull", "I will drink from your skull"),
("anim_gods_will_decide_your_fate", "Today the gods will decide your fate"),
("anim_nice_head_on_shoulders", "That's a nice head you have on your shoulders"),
("anim_hunt_you_down", "We'll hunt you down"),
("anim_dead_men_tell_no_tales", "Dead men tell no tales"),
("anim_stand_and_deliver", "Stand and deliver!"),
("anim_your_money_or_your_life", "Your money or your life!"),
("anim_have_our_pay_or_fun", "We'll have our pay, or we'll have our fun"),
("anim_word_about_purse_belongings", "My men would like a word with you about your purse"),
("anim_easy_way_or_hard_way", "We can do this the easy way, or the hard way"),
("anim_everything_has_a_price", "Everything has a price, even your life"),
("anim_slit_your_throat", "I'd slit your throat for a trinket"),
("log_animation", "*ANIMATION* [{s1}] {s0}"),
("done", "Done"),
("use", "Use"),
("buy", "Buy"),
("buy_sell", "Buy / Sell"),
("buy_sell_craft", "Buy / Sell / Craft"),
("take", "Take"),
("take_put_craft", "Take / Put / Craft"),
("dont_have_enough_money", "You don't have enough money."),
("cant_equip_item", "You can't equip this item."),
("collect_reg1_gold", "Collect {reg1} gold"),
("rest", "Rest"),
("rest_horse", "Rest horse"),
("winch_lower", "Lower"),
("winch_raise", "Raise"),
("winch_drop", "Drop"),
("destructible", "Destructible"),
("cut_down", "Cut down"),
("mine", "Mine"),
("harvest", "Harvest"),
("prune", "Prune"),
("burn", "Burn"),
("destroy_s1", "Destroy {s1}"),
("destroy_all_items_cart", "Destroy all items in cart"),
("process_wood", "Process wood"),
("process_metal", "Smelt metal"),
("process_hammer_metal", "Hammer metal"),
("process_grind", "Grind"),
("process_cook", "Cook"),
("process_press", "Press"),
("process_brew", "Brew"),
("process_tavern", "Serve"),
("process_preserve", "Preserve"),
("process_spin", "Spin"),
("process_weave", "Weave"),
("process_cut", "Cut"),
("process_leather", "Tan leather"),
("stockpile", "Stockpile"),
("stockpile_nearly_full", "Stockpile is nearly full"),
("stockpile_full", "Stockpile is full"),
("export", "Export"),
("export_for_s1", "Export for {s1}"),
("import", "Import"),
("build", "Build"),
("access", "Access"),
("attach", "Attach"),
("not_close_enough", "Not close enough"),
("s0__s1_", "{!}{s0} ({s1})"),
("drop_money_bag", "Drop money bag"),
("deposit_money_chest", "Deposit in money chest"),
("withdraw_money_chest", "Withdraw from money chest"),
("admin_cheat_money", "Give yourself money (Admin)"),
("no_money_chest_nearby", "No money chests nearby."),
("cant_open_money_chest", "You can't open the money chest."),
("not_enough_money_in_chest", "Not enough money in the chest."),
("gold_reg2", "Gold: {reg2}"),
("buy_banner_faction", "Buy a banner for the {s1}"),
("stock_count_reg0", "Stock count: {reg0}"),
("crafting_refund_reg0_reward_reg1", "Crafting refund: {reg0} reward: {reg1}"),
("crafting_reward_reg1", "Crafting reward: {reg1}"),
("selling_price_reg0", "Selling price: {reg0}"),
("womens_clothes", "Women's clothes"),
("item_id", "Item ID:"),
("spawn_s1", "Spawn {s1}"),
("troop_not_available", "Not available to you^{s0}"),
("troop_train", "Train to be {s1} for the {s2}"),
("troop_assume_role", "Assume the role of {s1} of the {s2}"),
("troop_become", "Become {s1} with the {s2}"),
("troop_become_for", "Become {s1} for the {s2}"),
("troop_cost", "{s0}^Cost: {reg10}"),
("troop_strength_agility", "{s0}^Strength: {reg10}^Agility: {reg11}"),
("troop_weapon_proficiencies", "{s0}^One handed: {reg10}^Two Handed: {reg11}^Polearms: {reg12}^Archery: {reg13}^Crossbows: {reg14}^Throwing: {reg15}"),
("requires_strength_reg1", "Requires strength: {reg1}"),
("requires_power_draw_reg1", "Requires power draw: {reg1}"),
("requires_power_throw_reg1", "Requires power throw: {reg1}"),
("requires_shield_reg1", "Requires shield: {reg1}"),
("requires_riding_reg1", "Requires riding: {reg1}"),
("damage_reg1_speed_reg2", "Damage: {reg1}% Speed {reg2}%"),
("accuracy_reg1_reload_reg2", "Accuracy: {reg1}% Reload: {reg2}%"),
("joined_the_s1", "You have joined the {s1}."),
("s0_joined_the_s1", "{s0} joined the {s1}."),
("must_leave_s1_first", "You must leave the {s1} first."),
("not_a_member_of_s1", "You are not a member of the {s1}."),
("s1_captured_s2", "The {s1} have captured {s2}!"),
("your_faction_not_hostile_to_s1", "Your faction is not hostile to the {s1}."),
("your_faction_not_captured_required_points", "Your faction has not captured the required secondary points."),
("door_locked_by_s1", "The door is locked by the {s1}."),
("door_bolted", "The door is bolted on the other side."),
("chest_locked_by_s1", "This chest is locked by the {s1}."),
("s0_killed_faction_member", "{s0} killed a member of the same faction!"),
("s0_killed_friendly_faction_member", "{s0} killed a member of a friendly faction!"),
("s0_has_been_outlawed", "{s0} has been outlawed!"),
("you_have_been_outlawed", "You have been outlawed!"),
("your_outlaw_rating_now_reg1", "Your outlaw rating is now {reg1}."),
("scene_error_this_faction_is_not_active", "Scene error: this faction is not active!"),
("you_are_not_lord_of_s1", "You are not the lord of the {s1}."),
("you_are_now_lord_of_s1", "You are now the lord of the {s1}."),
("s10_now_lord_of_s1", "{s10} is now the lord of the {s1}."),
("s10_now_known_as_s1", "The {s10} are now known as the {s1}."),
("banner_used_by_s1", "That banner is used by the {s1}."),
("s1_doesnt_need_merc", "The {s1} doesn't need mercenaries."),
("s1_reign_supreme", "The {s1} reign supreme!"),
("s1_now_hostile_towards_s10", "The {s1} are now hostile towards the {s10}."),
("s1_and_s10_made_peace", "The {s1} and the {s10} have made peace."),
("lord_of_s1_withdraws_offer_of_peace", "The lord of the {s1} withdraws the offer of peace."),
("lord_of_s1_offers_peace", "The lord of the {s1} offers peace."),
("not_riding_necessary_horse", "You are not riding the necessary horse."),
("already_attached_cart", "Already attached to another cart."),
("resource_required", "More resources required."),
("not_at_mast_or_rudder", "You are not close enough to the mast or rudder."),
("craft_not_skilled", "You don't seem to be skilled enough."),
("craft_wrong_resources", "You don't seem to have the right resources."),
("no_horse", "You have no horse."),
("too_wounded_to_rest", "Too wounded to rest here."),
("too_hungry_to_rest", "Too hungry to rest here."),
("horse_too_wounded_to_sell", "Your horse is too wounded to sell."),
("dismount_to_sell", "Dismount to sell your horse."),
("item_too_long_for_container", "That item is too long to fit inside the container."),
("cant_put_money_bag_in_container", "You can't put money bags inside containers."),
("too_far_away_loot", "Too far away to loot."),
("herd_animal_limit_reached", "Herd animal limit reached."),
("s1_reveals_money_pouch_containing_about_reg1", "{s1} reveals a money pouch containing about {reg1} coins."),
("you_reveal_money_pouch_to_s1", "You reveal your money pouch to {s1}."),
("s1_revealed_money_pouch_containing_reg1_to_s2", "{s1} revealed a money pouch containing about {reg1} coins to {s2}."),
("your_target_too_far_away", "Your target is too far away."),
("no_target_selected", "No target selected."),
("chat_format", "[{s1}] {s0}"),
("send_message_to_players_nearby", "Send message to players nearby:"),
("send_message_to_the_s11", "Send message to the {s11}:"),
("change_name_of_your_faction", "Change the name of your faction:"),
("send_admin_message", "Send administrator message:"),
("send_admin_message_to_s1", "Send administrator message to {s1}:"),
("admin_chat_format", "*ADMIN* [{s1}] {s0}"),
("admin_chat_player_format", "*PLAYER* [{s1}] {s0}"),
("admin_chat_to_player_format", "*ADMIN* [{s1}] > [{s2}] {s0}"),
("admin_announcement_format", "*ANNOUNCEMENT* [{s1}] {s0}"),
("local_chat_log_format", "*LOCAL* {s0}"),
("faction_chat_log_format", "*FACTION* {s2} {s0}"),
("error_unable_to_find_link_scene_prop", "Unable to find link for scene prop - instance: {reg10} kind: {reg11} link kind: {reg12} link id: {reg13}"),
("error_unlinked_scene_prop", "Unlinked scene prop instance: {reg10} kind: {reg11} link id: {reg12} - only valid if this prop has not been added since loading the scene."),
("error_edit_mode_not_enabled", "Edit mode is not enabled in the Warband launcher: you will not be able to edit this scene."),
("no_more_unlinked_scene_props", "No more unlinked scene props."),
("error_scene_prop_0_pw", "Error: scene prop instance id 0 (kind {reg0}) is scripted (pw), so it might not work correctly."),
("error_load_out_id_reg0_not_defined", "Error: the inventory load out id {reg0} is not defined."),
("game_type_1_info", "This is the basic original game type: resources can be gathered to sell or to craft items like weapons, for helping your faction prosper and conquer."),
("game_type_2_info", "Stock piles and crafting are disabled, so unlimited items can be bought without requiring the support of resource gathering or crafting."),
("game_type_3_info", "The concept of money has been removed from this game type, so resource gathering and crafting are the only things required to obtain items."),
("game_type_4_info", "Exporting resources only transfers money to the castle chest, with no personal reward, and resource stockpiles have no cost for buying or selling."),
("game_type_5_info", "After dying or disconnecting, you will be reset to a peasant commoner with no money."),
("pw_welcome", "{s10}\
^^Game Type: {s11}\
^-----------------------------------------------------------\
^{s12}\
^^General Information\
^-----------------------------------------------------------\
^When you first join the game, you start in the commoners faction as a peasant; to train as another troop type and join a faction at the same time, go to one of the castles and use a training station.\
You can only join another faction as a commoner, or to become a commoner. The outlaws faction can be joined by training as one, but you will also get transferred to it after multiple team kills.\
Troop type, faction, gold, and outlaw rating are saved if you crash or disconnect from the game, and when you rejoin your previous stats will be given back. At peasant, ruffian, or brigand training stations, clicking use will switch faction to commoners or outlaws without respawning, keeping your previous class until you die; but after that you will respawn as the targeted troop, unless your previous troop type was peasant or serf. You can also switch factions by just clicking use if you are using the targeted troop type already, or at the mercenary training stations (which are enabled only when the faction owns no castles).\
For a certain period after a player spawns they will be invulnerable to attack, and also unable to attack others; when this period is over the default weapons will be given.\
^^To buy items from stockpiles, sheath your weapons and tap the use button (rather than hold it down); to sell, hold the item in your hand and tap use. When bought, items are spawned right on top of the stockpile, so you might have to aim around until the 'Equip' overlay shows.\
If the item requires higher stats or skills than your troop has, damage dealt, movement speed, accuracy, reload speed, and damage recieved might be affected in a bad way; probably making the items effectively worse than other ones with lower stats that your troop meets requirements for.\
Holding down the use key until the bar finishes is for crafting the item: you need to have the required engineering skill and resources equipped. If the item stockpile has no 'Stock count' line in the stats overlay, it is not craftable, and an unlimited amount of items can be bought there.\
^^Trees can be cut down for wood and mines hit for iron with the required labouring skill and tools; trees regrow after a while, and mines respawn after a longer period. Characters can mine faster if they have eaten food, as with having higher labouring skill and the correct tool.\
These resources can be refined with engineering skill at stations in the castles - the carpenter's bench and the forge - and then used for crafting weapons or other items.\
The forge can be used to combine or split iron bar types into the larger or smaller type: to split, stand on the left side and use the forge, to combine, stand on the right.\
Certain destructable items like doors or bridges can be repaired with engineering skill, tools, and resources like wood (of which any wood item will do, though items can have different resource values).\
^^Fish schools placed will move randomly around the scene, staying near areas of shallow water, and occasionally making a ripple or splash depending on the number of fish in the school; they can be fished with spears or nets.\
Wheat can be planted in fields by throwing grain and then waiting for it to grow; the ground should be watered for best growth, and harvesting should not be started until the wheat is fully grown to avoid damaging it. Wheat sheaves can be ground into flour and then baked into bread with water; sheaves can also be used with water to brew beer.\
Vines need to be pruned with knives to grow grapes, which can then be pressed into must and then into wine; the wine and beer barrels need to be taken to a tavern bench to serve into jars or jugs.\
Flax can be harvested by pulling the plants up by the roots, then spinning and weaving into linen cloth, which can be cut into smaller pieces with shears, and woven back into a larger roll again.\
Certain animals can be herded then slaughtered for hides and meat, using the correct tools; the hides can be tanned into leather in tannery pools, then cut into smaller pieces if needed; the meat can be cooked to give more nutrition, or salted to preserve for stockpiling or export.\
The herding crook can be used to move animals away from your character in the default mode, and with the alternate mode to stop and attach them to a nearby heard; animals will periodically go back to following the herd leader, so you should try detect which one that is and concentrate on herding it. The animals will not reproduce unless there are at least two adults in the herd.\
Higher labouring skill sometimes means that less resources are required when processing food, because less is spoiled.\
^^To sail a ship requires two players (or one player moving around a lot): using the up and down arrow keys near the main mast will change the forward or backward movement; using the left and right arrow keys near the rudder will steer the ship, as long as the steering player doesn't move away; up or down keys at the rudder will center it. Your level of sailing skill determines how fast you can sail.\
When the ramp is resting on the deck, using will move it away from the player, extending over the side, then using once more will move it back to the center; if the ship is not moving the ramp will angle down near ground level. The up arrow key can be used to climb up the side of the ship from close beside the middle section of the hull.\
Ships will be damaged when colliding with the ground, large objects in the water, or other ships, but they can be repaired with wood by engineers; otherwise, after enough damage they will sink.\
If you stay under water for too long you will drown - more quickly the faster you move.\
^^An action menu can be accessed by holding down a control (same as the native character window); from it your head and hand armor can be toggled on and off (as if you put it in your pocket) but body and foot armor can only be dropped.\
Armor can also be looted off bodies for some time after their death, from very close range; to do it you must aim at the blood splat and press the target control, then transfer between the inventory slots that appear. You probably want to change the target key to something more easily accessible than the default.\
^^Carts are attached by using from the front side with no items wielded in your hands, and the inventory is accessed by using from the other side; detaching can be done by using again. Alternatively, the action menu can be used to attach or detach carts.\
^^The money bag window (which is toggled with the native inventory key) can be used to drop money bag items, or access money chests. The value of the last money bag picked up is used when dropping one or pressing attack to get the money, rather than the particular bag you have wielded (due to game engine limitations).\
To deposit gold in a chest you must be near it, and to withdraw you also either need to be the lord of the associated faction or have been given the key, break the chest open with a weapon, or use a lock pick to open it (looting skill makes success more likely). To tell if a chest is unlocked, press and release the use button: if you hear a click sound it is locked, but if no sound it is unlocked; holders of the faction keys for castle chests can relock them by clicking use. Locked teleport doors work in a very similar way.\
When you die, a bag with a small percentage of your money will be dropped, along with all your equipped items.\
^^Troops with the wound treatment skill can heal other badly wounded players by hitting them with the surgeon's scalpel, up to a certain percentage of full health, based on skill; while resting on beds can be used by less wounded troops to heal fully - each type of bed can have a different minimum health required - which requires and uses up food eaten.\
Players hit with a poisoned dagger will lose health slowly over time until they die, unless a doctor treats them with a healing herb leaf.\
^^Enemy castles can be captured for your faction by using a primary flag pole (probably on top of the castle keep) while holding a your faction's banner item; some castles might require secondary points to be captured first, either all of them, at least one, or a combination of both. Lords can also give away their own castles peacefully by using the other faction's banner on a primary capture point. The banner and name of a faction can be changed by the lord; when the banner is changed, armor on players of that faction will be updated, but hand banners will not.\
^^To shout with the local chat messages, hold down right shift while pressing enter. With faction chat messages, the lord can send messages that are displayed in big letters in the same way, with right shift.\
A normal player can only send admin chat messages that are visible to other admins on the server; admins can also send messages to specific players after selecting from the list brought up with F11 (press escape to cancel the list and target everyone), and can also hold right shift when sending to send announcements to all players or warnings to a specific player.\
As admin, holding down right shift while voting overrides the poll. The agent selected by pressing the target key with shift held down (if any) will be used as the target for admin tools and chat."),
("pw_editor_welcome", "Press F1 for editing information."),
("pw_editor_info", "General editing information^-----------------------------------------------------------^^\
* Scripted scene props (starting with pw) do not work in this mode, only on a dedicated server.^\
* Make sure the first few scene props placed are not scripted (start with pw): instance id 0 can't be handled by some scripts.^\
* Factions are numbered from 0 - 9, with commoners = 0, outlaws = 1, and the 4 castle factions starting from 2.^\
* Castles are numbered from 2 - 9, and start the mission owned by the corresponding castle faction: for example, the default 'White Faction' starts with castle 3.^\
* Factions are enabled for the scene if at least one capture point is placed for their starting castle.^\
* Spawn points are 0 - 4 for commoners, 10 - 14 for outlaws, 20 - 24 for castle 1, and so on, up to 90 - 94 for castle 8.^\
* For rotating scene props like trees or destroyable doors, place the origin at or slightly above ground level.^\
* Make sure to adjust ships to the right height for the water level on the hull, as the scripts will use that since each ship mesh has a different height.^\
* Place at least one castle sign for every castle (or capturable area), so the stats chart and capture messages will use proper names.^\
* The scene props starting with code_ should not be placed manually in your scene: they are spawned by the scripts as needed.^\
* Fish schools should be placed somewhere in water, and will move randomly to nearby shallow parts.^\
* The maximum number of lights in a scene that will be visible is 10: a limitation of the game engine.^\
* The lift platform must be linked with two corresponding winches, which will determine the vertical movement range: the plaform will be able to move between 1 unit below the upper winch to 1 unit below the lower one.^\
^Edit scene mode keys^-----------------------------------------------------------^^\
F1 = this information^\
F2 = scene prop editor value information^\
F3 = list of castle names with numbers^\
F8 = move agent to the positions of all scene props that can't find the other prop they need to link with.^\
F9 = spawn new random player agent^\
F10 = move agent to the positions of all scene props added to the ship collision list when the scene was loaded^\
F11 = spawn test horse at agent position^\
F12 = measure distance to the first pointer_arrow prop"),
("pw_editor_values_info", "Scene prop editor values 1 and 2^-----------------------------------------------------------^^\
pw_buy_*:^value 1 = faction + multiplier^value 2 = design target stock count^^\
pw_stockpile_*:^value 1 = faction + multiplier^value 2 = 01 - 09: initial and target count / 10; 10 - 120: stock limit / 10^^\
pw_export_*:^value 1 = faction + multiplier^value 2 = faction tax multiplier^^\
pw_import_*:^value 1 = faction + multiplier^^\
pw_local_*_price_area:^value 1 = multiplier^scale x = scene area of effect (prices of props outside will be interpolated between other areas of this type)^^\
pw_change_troop_*:^value 1 = faction + multiplier^^\
pw_door_rotate_*:^value 1 = faction / castle id^value 2 = options bitfield (0x1 = start open, 0x2 = bolted, 0x4 = not pickable, 0x8 = half hit points, 0x10 = start destroyed)^^\
pw_door_teleport_*:^value 1 = faction / castle id^value 2 = linking id with x^^\
pw mines:^value 1 = initial hit points / 1000^value 2 = respawn time multiplier^^\
pw trees, bushes, plants, fields, vines:^value 2 = respawn time multiplier^^\
pw bridges:^value 2 = linking id with two x_footing props^^\
pw walls, ladders:^value 2 = linking id with x_build^^\
pw_construction_box:^value 1 = hit points multiplier^^\
pw portcullises, drawbridges, trapdoors:^value 1 = options bitfield (0x1 = start closed)^value 2 = linking id with x_winch^^\
pw_lift_platform:^value 2 = linking id with two x_winch props^^\
pw ships:^value 1 = initial ramp position (0 = center, 1 = left, 2 = right)^Only place the hull, the other parts are spawned at mission start.^^\
pw_ferry_boat:^value 2 = linking id with two pw_ferry_platform props^^\
pw_castle_capture_point:^value 1 = faction / castle id^value 2 = capture type (0 = primary, 1 = all secondary, 2 one secondary)^^\
pw_castle_sign:^value 1 = faction / castle id^value 2 = number of the name, listed in the F3 information window (name can't be used by another castle)^^\
pw_castle_money_chest:^value 1 = faction / castle id^value 2 = initial gold^^\
pw_item_chest_*:^value 1 = faction / castle id^value 2 = starting inventory load out id^^\
pw_scene_day_time:^value 1 = scene hour of day (0 - 23)^^\
pw_scene_cloud_haze:^value 1 = global cloud (adjusts skybox)^value 2 = global haze^^\
pw_scene_ambient_sound:^value 1 = sound id offset from snd_fire_loop^value 2 = probability the sound will be played (0 - 100) or 127 = looping^^\
pw_scene_light:^value 1 = flicker magnitude^value 2 = flicker interval^scale x = red, 1.00 = 100^scale y = green, 1.00 = 100^scale z = blue, 1.00 = 100^^\
pw_scene_precipitation:^value 1 = minimum precipitation intensity^value 2 = maximum precipitation intensity^scale x = precipitation area size^^\
pw_scene_fog:^position z (height) = visiblity distance^scale x = red, 1.00 = 255^scale y = green, 1.00 = 255^scale z = blue, 1.00 = 255^^\
pw_scene_snow_level:^position z (height) = level in scene where rain changes to snow^^\
pw_scene_wind_direction:^value 1 = minimum wind strength^value 2 = maximum wind strength^^\
pw_fire_wood_heap:^value 2 = initial wood amount^^\
pw_fish_school:^value 1 = maximum depth (in meters)^value 2 = maximum fish count^^\
pw_herd_animal_spawn:^value 1 = animal item offset starting with 1 for deer, 0 or invalid for a random animal^value 2 = approximate time between spawns in hours, minimum 1^^\
-----------------------------------------------------------^\
^The values can be modified in the scene editor panel, scene prop section: the two fields, labelled 'Var No' and 'Var 2 No'. These can each store an integer in the range 0 - 127.^\
^The scene props with 'value 1 = faction + multiplier' share the same code for storing a combination of faction id / castle id and gold value multiplier in value 1:^\
0 is the commoner faction, which normally means the prop is not associated with any faction, 1 for outlaws, and 2 - 9 associate the scene prop with a castle (which starts owned by the corresponding faction id).^\
The other part is a multiple of 10, representing specific gold value multipliers:^\
0 = 100%, the default value^10 = 20%^20 = 40%^30 = 60%^40 = 80%^50 = 120%^60 = 140%^70 = 160%^80 = 180%^90 = 200%^100 = 350%^110 = 500%^120 = 1000%^\
These two separate values are combined: for example, 31 = outlaw faction and 60% value, 116 = castle 5 (starts owned by faction 5, the yellow faction) and 500% value.^\
^The scene props with 'value 2 = linking id with x' mean that they are designed to be linked to another scene prop, x representing the first scene prop's name:^\
On scene load, the props are linked by searching for the nearest prop in the scene of the specified type, with the same value 2, and that hasn't already been linked; so for example, with a teleport door pair on opposite sides of the scene, you could set the value 2 of both to 53 and not use that number for any of the other props of the same type, so they are linked correctly even when not close together, or if someone else adds more props of the same type in between."),
("pw_editor_castle_names", "Castle names with numbers^-----------------------------------------------------------^^{s2}"),
("castle_names_numbers_format", "{reg1} = {s1}^{s0}"),
("book_of_clothing", "The Book of Tailoring Clothes^^^"),
("book_of_weapons", "The Book of Crafting Weaponry^^^"),
("book_of_armor", "The Book of Crafting Armor^^^"),
("book_of_healing", "The Book of Healing^^^\
Healing arts are difficult to master, requiring many hours careful study as a medical doctor for the best results; though certain peasant healers learn unconventional methods passed down from the generations before them, that give limited results. For a dangerously hurt patient, whether man or beast, it is generally required to use a carefully crafted surgeon's scalpel to operate on the wound, but for moderate wounds rest and food is often all that is required for a complete recovery.^^\
Healing herbs found in certain locations can be cut from the bushes with a knife, then applied to a poisoned wound as an antidote; larger amounts of poison absorbed could require multiple herb leaves. The healing herbs have long green leaves branching off in finger shaped patterns, with bunches of small blue flowers near the top.^\
Poisonous herbs can also sometimes be found, which are then applied to the blades of certain weapons used for murderous deeds; they have many brown stalks covered in tiny green leaves."),
("name_server_error_code_reg0", "Name server: error code {reg0}."),
("name_server_input_error_parameter_s0", "Name server: input error with parameter '{s0}'."),
("name_server_invalid_response", "Name server: invalid response '{reg0}|{reg1}|{reg2}|{s0}|{reg3}'."),
("name_server_log_s10", "Name server: player {s0} ({reg2}): {s10}"),
("kicked_using_other_players_name", "Kicked for using another player's name."),
("kicked_using_other_clan_tag", "Kicked for using another clan's tag."),
("kicked_using_invalid_name", "Kicked for using invalid characters in the name."),
("kicked_not_registered", "Kicked since not registered for this server."),
("http_s1_password_s2_id_reg1_uid_reg2_name_s3", "http://{s1}/checkplayer.php?password={s2}&id={reg1}&uid={reg2}&name={s3}"),
("http_s0_admin", "{s0}&admin"),
("name_server", "localhost/pwnameserver"),
("name_server_password", "WD915Kyi18"),
("scene_name_1", "Default Scene"),
("scene_name_2", "Blank Scene 2"),
("scene_name_3", "Blank Scene 3"),
("scene_name_4", "Blank Scene 4"),
("scene_name_5", "Blank Scene 5"),
("scene_name_6", "Blank Scene 6"),
("scene_name_7", "Blank Scene 7"),
("scene_name_8", "Blank Scene 8"),
("scene_name_9", "Blank Scene 9"),
("scene_name_10", "Basin of Ice"),
("scene_name_11", "Across the River"),
("scene_name_12", "Game of Thrones"),
("scene_name_13", "Dominion"),
("scene_name_14", "Titan Isle"),
("scene_names_end", "scene_names_end"),
("castle_name_0", "Laszloburg Castle"),
("castle_name_1", "Vornneston Castle"),
("castle_name_2", "Hakrholm Castle"),
("castle_name_3", "Pordisett Castle"),
("castle_name_4", "Bernstead Castle"),
("castle_name_5", "Osvirklif Castle"),
("castle_name_6", "Montibeil Castle"),
("castle_name_7", "Zavansk Castle"),
("castle_name_8", "Valdemel Castle"),
("castle_name_9", "Berwinwic Castle"),
("castle_name_10", "Praven Castle"),
("castle_name_11", "Jelkala Castle"),
("castle_name_12", "Sargoth Castle"),
("castle_name_13", "Shariz Castle"),
("castle_name_14", "Volcain Mine"),
("castle_name_15", "Bono Mine"),
("castle_name_16", "Pilmir Mine"),
("castle_name_17", "Mount Hellstone"),
("castle_name_18", "Jerusalem"),
("castle_name_19", "Milturn Village"),
("castle_name_20", "Pinecrest Village"),
("castle_name_21", "Burglen Village"),
("castle_name_22", "Serindiar Village"),
("castle_name_23", "Richfield Village"),
("castle_name_24", "Dusturil Village"),
("castle_name_25", "Retden Village"),
("castle_name_26", "Crusader Castle"),
("castle_name_27", "Oasis"),
("castle_name_28", "Sarranid Castle"),
("castle_name_29", "Old Port"),
("castle_name_30", "Laras Outpost"),
("castle_name_31", "Tandcot Outpost"),
("castle_name_32", "Durifell Outpost"),
("castle_name_33", "Brackaltwald Outpost"),
("castle_name_34", "Avalon"),
("castle_name_35", "Camelot"),
("castle_name_36", "City of Camelot"),
("castle_name_37", "Troll Cave"),
("castle_name_38", "Riverhigh Mine"),
("castle_name_39", "Iron Abbey"),
("castle_name_40", "Imperial Prison"),
("castle_name_41", "Rangers Guild"),
("castle_name_42", "Helheim Hold"),
("castle_name_43", "Springvale Hold"),
("castle_name_44", "Hollow Bastion Hold"),
("castle_name_45", "Hellwood Hold"),
("castle_name_46", "Fjorgyn Hold"),
("castle_name_47", "Sessrumnir Hold"),
("castle_name_48", "New Zendar"),
("castle_name_49", "Glunmar Village"),
("castle_name_50", "Town Watch Headquarters"),
("castle_name_51", "Bluvard Bank"),
("castle_name_52", "Underworld Caverns"),
("castle_name_53", "Whiteforge Harbour"),
("castle_name_54", "Whiteforge Guardhouse"),
("castle_name_55", "Saint Michael Abbey"),
("castle_name_56", "Nordheim"),
("castle_name_57", "Nordheim Castle"),
("castle_name_58", "Farendal"),
("castle_name_59", "Madvakt Castle"),
("castle_name_60", "Bredby"),
("castle_name_61", "Kildevakt Castle"),
("castle_name_62", "Saltklippen"),
("castle_name_63", "Nantes"),
("castle_name_64", "Irontown"),
("castle_name_65", "Halmar"),
("castle_name_66", "Brunwud Castle"),
("castle_names_end", "Invalid Castle Name"),
]
|
// Route table for the Meteor/Vulcan app. Registration order is kept exactly
// as-is, since route matching can depend on it.
import { addRoute, getSetting } from 'meteor/vulcan:core';

// Routes carried over from the example-forum package.
addRoute([
  { name: 'posts.daily', path: 'daily', componentName: 'PostsDaily', title: "Posts by Day" },
  { name: 'users.single', path: 'users/:slug', componentName: 'UsersSingle' },
  { name: 'users.account', path: 'account', componentName: 'UsersAccount' },
  { name: 'users.edit', path: 'users/:slug/edit', componentName: 'UsersAccount' },
]);

// Miscellaneous LW2 routes.
addRoute({ name: 'login', path: '/login', componentName: 'LoginPage', title: "Login" });
addRoute({ name: 'inbox', path: '/inbox', componentName: 'InboxWrapper', title: "Inbox" });
addRoute({ name: 'newPost', path: '/newPost', componentName: 'PostsNewForm', title: "New Post" });
addRoute({ name: 'editPost', path: '/editPost', componentName: 'PostsEditForm' });
addRoute({ name: 'recentComments', path: '/recentComments', componentName: 'RecentCommentsPage', title: "Recent Comments" });

// Sequence browsing and editing.
addRoute({ name: 'sequencesHome', path: '/library', componentName: 'SequencesHome', title: "The Library" });
addRoute({ name: 'sequences.single.old', path: '/sequences/:_id', componentName: 'SequencesSingle' });
addRoute({ name: 'sequences.single', path: '/s/:_id', componentName: 'SequencesSingle' });
addRoute({ name: 'sequencesEdit', path: '/sequencesEdit/:_id', componentName: 'SequencesEditForm' });
addRoute({ name: 'sequencesNew', path: '/sequencesNew', componentName: 'SequencesNewForm', title: "New Sequence" });
addRoute({ name: 'sequencesPost', path: '/s/:sequenceId/p/:postId', componentName: 'SequencesPost' });
addRoute({ name: 'chaptersEdit', path: '/chaptersEdit/:_id', componentName: 'ChaptersEditForm', title: "Edit Chapter" });

// Curated collections and their per-post pages.
addRoute({ name: 'collections', path: '/collections/:_id', componentName: 'CollectionsSingle' });
addRoute({ name: 'Sequences', path: '/sequences', componentName: 'CoreSequences', title: "Rationality: A-Z" });
addRoute({ name: 'Rationality', path: '/rationality', componentName: 'CoreSequences', title: "Rationality: A-Z" });
addRoute({ name: 'Rationality.posts.single', path: '/rationality/:slug', componentName: 'PostsSingleSlugWrapper' });
addRoute({ name: 'HPMOR', path: '/hpmor', componentName: 'HPMOR', title: "Harry Potter and the Methods of Rationality" });
addRoute({ name: 'HPMOR.posts.single', path: '/hpmor/:slug', componentName: 'PostsSingleSlugWrapper' });
addRoute({ name: 'Codex', path: '/codex', componentName: 'Codex', title: "The Codex" });
addRoute({ name: 'Codex.posts.single', path: '/codex/:slug', componentName: 'PostsSingleSlugWrapper' });

// Debug-only routes for exercising the editor and related widgets.
addRoute({ name: 'searchTest', path: '/searchTest', componentName: 'SearchBar' });
addRoute({ name: 'postsListEditorTest', path: '/postsListEditorTest', componentName: 'PostsListEditor' });
addRoute({ name: 'imageUploadTest', path: '/imageUpload', componentName: 'ImageUpload' });

// Posts, local groups, and events.
addRoute({ name: 'posts.single', path: 'posts/:_id(/:slug)', componentName: 'PostsSingle' });
addRoute({ name: 'Localgroups.single', path: 'groups/:groupId', componentName: 'LocalGroupSingle' });
addRoute({ name: 'events.single', path: 'events/:_id(/:slug)', componentName: 'PostsSingle' });
addRoute({ name: 'groups.post', path: '/g/:groupId/p/:_id', componentName: 'PostsSingle' });

// Administration pages.
addRoute({ name: 'admin', path: '/admin', componentName: 'AdminHome', title: "Admin" });
addRoute({ name: 'moderation', path: '/moderation', componentName: 'ModerationLog', title: "Moderation Log" });

// The "about" page is a pinned post rendered by id.
addRoute({ name: 'about', path: '/about', componentName: 'PostsSingleRoute', _id: "C6HnNvkYTZpktFzdJ" });

// The root path depends on whether this deployment is configured as the
// Alignment Forum.
if (getSetting('AlignmentForum', false)) {
  addRoute({ name: 'alignment.home', path: '/', componentName: 'AlignmentForumHome' });
} else {
  addRoute({ name: 'home', path: '/', componentName: 'Home' });
}
|
import os
import re
from setuptools import setup
def get_long_description():
    """
    Return the contents of README.md for use as the PyPI long description.
    """
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open("README.md", "r", encoding="utf8") as readme:
        return readme.read()
def get_packages(package):
    """
    Return root package and all sub-packages.

    A directory counts as a package when it contains an ``__init__.py``.
    """
    packages = []
    for path, _dirnames, _filenames in os.walk(package):
        if os.path.exists(os.path.join(path, "__init__.py")):
            packages.append(path)
    return packages
def get_version(package):
    """
    Return package version as listed in `__version__` in `__init__.py`.
    """
    path = os.path.join(package, "__init__.py")
    # Close the file promptly via a context manager rather than leaking the
    # handle until garbage collection.
    with open(path, "r", encoding="utf8") as init_py:
        contents = init_py.read()
    # Raw string for the regex; accepts either quote style around the version.
    return re.search(r"__version__ = ['\"]([^'\"]+)['\"]", contents).group(1)
# Distribution metadata for the `issuelab` package; runs at import time as is
# conventional for a setup.py.
setup(
    name="issuelab",
    # Version is read from issuelab/__init__.py so it is defined in one place.
    version=get_version("issuelab"),
    author="Tafil Avdyli",
    author_email="tafil@tafhub.de",
    description="Migrate issue boards",
    # README.md becomes the PyPI project page body.
    long_description=get_long_description(),
    long_description_content_type="text/markdown",
    url="https://github.com/tafilz/issuelab",
    # Discover the root package and all sub-packages containing __init__.py.
    packages=get_packages("issuelab"),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # Runtime dependencies installed alongside the package.
    install_requires=["requests", "python-gitlab", "youtrack"]
)
|