text
stringlengths 3
1.05M
|
|---|
import argparse
import os
class FsExistsType:
    """Argparse ``type=`` validator that accepts a path only if it exists on disk."""

    def __call__(self, prospective_dir):
        """Return *prospective_dir* unchanged, or raise ArgumentTypeError if it does not exist."""
        if os.path.exists(prospective_dir):
            return prospective_dir
        raise argparse.ArgumentTypeError("{0} does not exist".format(prospective_dir))
|
#!/usr/bin/python
# (c) 2019, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_ontap_object_store
short_description: NetApp ONTAP manage object store config.
extends_documentation_fragment:
- netapp.na_ontap
version_added: '2.9'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Create or delete object store config on ONTAP.
options:
state:
description:
- Whether the specified object store config should exist or not.
choices: ['present', 'absent']
default: 'present'
type: str
name:
required: true
description:
- The name of the object store config to manage.
type: str
provider_type:
required: false
description:
- The name of the object store config provider.
type: str
server:
required: false
description:
- Fully qualified domain name of the object store config.
type: str
container:
required: false
description:
- Data bucket/container name used in S3 requests.
type: str
access_key:
required: false
description:
- Access key ID for AWS_S3 and SGWS provider types.
type: str
secret_password:
required: false
description:
- Secret access key for AWS_S3 and SGWS provider types.
type: str
'''
EXAMPLES = """
- name: object store Create
na_ontap_object_store:
state: present
name: ansible
provider_type: SGWS
server: abc
container: abc
access_key: s3.amazonaws.com
secret_password: abc
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
- name: object store Create
na_ontap_object_store:
state: absent
name: ansible
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_module import NetAppModule
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppOntapObjectStoreConfig(object):
    ''' object initialize and class methods '''

    def __init__(self):
        # Start from the shared ONTAP connection argument spec (hostname,
        # username, password, ...) and add the module-specific options.
        self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
        self.argument_spec.update(dict(
            name=dict(required=True, type='str'),
            state=dict(required=False, choices=['present', 'absent'], default='present'),
            provider_type=dict(required=False, type='str'),
            server=dict(required=False, type='str'),
            container=dict(required=False, type='str'),
            access_key=dict(required=False, type='str'),
            # no_log keeps the secret out of Ansible logs/output.
            secret_password=dict(required=False, type='str', no_log=True)
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )
        self.na_helper = NetAppModule()
        # Normalized copy of the module parameters used throughout this class.
        self.parameters = self.na_helper.set_parameters(self.module.params)
        if HAS_NETAPP_LIB is False:
            # netapp-lib is required for all ZAPI communication below.
            self.module.fail_json(msg="the python NetApp-Lib module is required")
        else:
            # ZAPI connection handle used by all invoke_successfully() calls.
            self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)

    def get_aggr_object_store(self):
        """
        Fetch details if object store config exists.
        :return:
            Dictionary of current details if object store config found
            None if object store config is not found
        """
        aggr_object_store_get_iter = netapp_utils.zapi.NaElement.create_node_with_children(
            'aggr-object-store-config-get', **{'object-store-name': self.parameters['name']})
        result = None
        try:
            result = self.server.invoke_successfully(aggr_object_store_get_iter, enable_tunneling=False)
        except netapp_utils.zapi.NaApiError as error:
            # Error 15661 denotes an object store not being found.
            # In that case fall through and return None ("does not exist").
            if to_native(error.code) == "15661":
                pass
            else:
                # Any other API error is fatal for the module run.
                self.module.fail_json(msg=to_native(error), exception=traceback.format_exc())
        return result

    def create_aggr_object_store(self):
        """
        Create aggregate object store config
        :return: None
        """
        # These options have no server-side defaults, so all must be supplied
        # before attempting the create call.
        required_keys = set(['provider_type', 'server', 'container', 'access_key'])
        if not required_keys.issubset(set(self.parameters.keys())):
            self.module.fail_json(msg='Error provisioning object store %s: one of the following parameters are missing '
                                      '%s' % (self.parameters['name'], ', '.join(required_keys)))
        options = {'object-store-name': self.parameters['name'],
                   'provider-type': self.parameters['provider_type'],
                   'server': self.parameters['server'],
                   's3-name': self.parameters['container'],
                   'access-key': self.parameters['access_key']}
        # secret_password is optional; only pass it when provided.
        if self.parameters.get('secret_password'):
            options['secret-password'] = self.parameters['secret_password']
        object_store_create = netapp_utils.zapi.NaElement.create_node_with_children('aggr-object-store-config-create', **options)
        try:
            self.server.invoke_successfully(object_store_create, enable_tunneling=False)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg="Error provisioning object store config %s: %s"
                                      % (self.parameters['name'], to_native(error)),
                                  exception=traceback.format_exc())

    def delete_aggr_object_store(self):
        """
        Delete aggregate object store config
        :return: None
        """
        object_store_destroy = netapp_utils.zapi.NaElement.create_node_with_children(
            'aggr-object-store-config-delete', **{'object-store-name': self.parameters['name']})
        try:
            self.server.invoke_successfully(object_store_destroy,
                                            enable_tunneling=False)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg="Error removing object store config %s: %s" %
                                  (self.parameters['name'], to_native(error)), exception=traceback.format_exc())

    def asup_log_for_cserver(self, event_name):
        """
        Fetch admin vserver for the given cluster
        Create and Autosupport log event with the given module name
        :param event_name: Name of the event log
        :return: None
        """
        results = netapp_utils.get_cserver(self.server)
        # EMS events must be logged against the cluster admin vserver.
        cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results)
        netapp_utils.ems_log_event(event_name, cserver)

    def apply(self):
        """
        Apply action to the object store config
        :return: None
        """
        # Log an EMS/AutoSupport event so the operation is traceable on the cluster.
        self.asup_log_for_cserver("na_ontap_object_store_config")
        current = self.get_aggr_object_store()
        # cd_action is 'create', 'delete', or None based on desired vs. current state.
        cd_action = self.na_helper.get_cd_action(current, self.parameters)
        if self.na_helper.changed:
            if self.module.check_mode:
                # Check mode: report the pending change without applying it.
                pass
            else:
                if cd_action == 'create':
                    self.create_aggr_object_store()
                elif cd_action == 'delete':
                    self.delete_aggr_object_store()
        self.module.exit_json(changed=self.na_helper.changed)
def main():
    """Entry point: build the object store config handler and apply the requested state."""
    NetAppOntapObjectStoreConfig().apply()


if __name__ == '__main__':
    main()
|
$(document).ready(function () {
    // Set the page heading once; the previous extra .html("") call was a
    // redundant DOM write that was immediately overwritten.
    $("#pageTitle").html("Create New Policy");
    // Return to the policy list when the back button is clicked.
    $("#btnGoback").on("click", function () {
        window.location.href = "/admin/policies/list";
    });
})
|
from ._architecture import Architecture
from . import _generator_base
from . import _generator_vs
from ._build_options import BuildOptions
from ._helpers import _check_type
from ._toolchain import Toolchain
class Configuration(object):
    """One build configuration (e.g. debug/x64) registered with a VS generator.

    On construction every argument is type-checked and the instance is
    appended to ``generator.configurations``.
    """

    # Architecture name tables indexed by int(Architecture.type).
    # Previously these literals were duplicated in four methods.
    _MSVC_PLATFORMS = ("Win32", "x64", "ARM")    # platform names used by MSVC solution/project files
    _MSVC_ARCH_NAMES = ("x86", "x64", "ARM")     # toolchain-facing architecture names

    def __init__(self, generator, name, name_build, build_options, toolchain, architecture):
        """Validate argument types and register this configuration with *generator*."""
        _check_type(generator, "Generator", [_generator_vs.GeneratorVS2015, _generator_vs.GeneratorVS2017])
        self.generator = generator
        self.generator.configurations.append(self)
        _check_type(name, "Configuration name", str)
        self.name = name
        _check_type(name_build, "Configuration build name", str)
        self.name_build = name_build
        self.additional_include_directories = []
        _check_type(build_options, "Configuration build options", BuildOptions)
        self.build_options = build_options
        _check_type(toolchain, "Configuration toolchain", Toolchain)
        self.toolchain = toolchain
        _check_type(architecture, "Configuration architecture", Architecture)
        self.architecture = architecture

    def _getMSVCConfigPlat(self):
        # Return something like "debug|Win32".
        return "%s|%s" % (self.name, self._MSVC_PLATFORMS[int(self.architecture.type)])

    def _getMSVCName(self):
        # Return something like "debug|x86".
        return "%s|%s" % (self.name, self._MSVC_ARCH_NAMES[int(self.architecture.type)])

    def _getMSVCArch(self):
        # Platform name alone, e.g. "x64".
        return self._MSVC_PLATFORMS[int(self.architecture.type)]

    def _getMSVCArchName(self):
        # Architecture name alone, e.g. "x86".
        return self._MSVC_ARCH_NAMES[int(self.architecture.type)]
|
import requests
import pytest
@pytest.fixture(scope="session")
def metrics():
    """Session-scoped placeholder fixture; the endpoint tests need no shared setup."""
    return None
def test_annual_commit_count_ranked_by_new_repo_in_repo_group(metrics):
    """Group-level endpoint returns 200 with at least one row and a non-negative net."""
    resp = requests.get('http://localhost:5000/api/unstable/repo-groups/20/annual-commit-count-ranked-by-new-repo-in-repo-group/')
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) >= 1
    assert payload[0]["net"] >= 0
def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_repo(metrics):
    """Repo-scoped endpoint returns 200 with at least one row and a positive net."""
    resp = requests.get('http://localhost:5000/api/unstable/repo-groups/20/repos/21000/annual-commit-count-ranked-by-new-repo-in-repo-group')
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) >= 1
    assert payload[0]["net"] > 0
def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_group(metrics):
    """Group-scoped endpoint (no trailing slash) returns 200 with a positive net."""
    resp = requests.get('http://localhost:5000/api/unstable/repo-groups/20/annual-commit-count-ranked-by-new-repo-in-repo-group')
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) >= 1
    assert payload[0]["net"] > 0
def test_annual_commit_count_ranked_by_repo_in_repo_group_by_repo(metrics):
    """Repo-scoped ranked-by-repo endpoint returns 200 with a positive net."""
    resp = requests.get('http://localhost:5000/api/unstable/repo-groups/20/repos/21000/annual-commit-count-ranked-by-repo-in-repo-group')
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) >= 1
    assert payload[0]["net"] > 0
def test_annual_commit_count_ranked_by_repo_in_repo_group_by_group(metrics):
    """Group-scoped ranked-by-repo endpoint returns 200 with a positive net."""
    resp = requests.get('http://localhost:5000/api/unstable/repo-groups/20/annual-commit-count-ranked-by-repo-in-repo-group')
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) >= 1
    assert payload[0]["net"] > 0
def test_committer_data(metrics):
    """Committer-data endpoint (slow query, generous timeout) returns 200 with rows."""
    resp = requests.get('http://localhost:5000/api/unstable/repo-groups/20/committer-data', timeout=(10, 120))
    payload = resp.json()
    assert resp.status_code == 200
    assert len(payload) >= 1
|
// Truffle network configuration: where to find an Ethereum node per network.
module.exports = {
    networks: {
        // Local development chain (port 7545 is the Ganache GUI default).
        development: {
            host: "127.0.0.1",
            port: 7545,
            network_id: "*" // match any network id
        },
        // Ropsten testnet via a local node.
        ropsten: {
            host: "localhost",
            port: 8545,
            network_id: 3,
            gas: 4700000 // gas limit for deployments
        },
        // Ethereum mainnet via a local node.
        main: {
            host: "localhost",
            port: 8545,
            network_id: 1,
            gas: 4700000, // gas limit for deployments
            gasPrice: 7 // NOTE(review): 7 (wei?) looks implausibly low — confirm intended units
        }
    }
};
|
const path = require('path')
const ghpages = require('gh-pages')
const config = require('config')
const chalk = require('chalk')

const token = process.env.GH_TOKEN
const date = new Date().toISOString()
const { github } = config.get('deploy')

// Resolve the branch once so the log message and the actual publish agree.
// Previously the log used `github.branch || 'gh-pages'` while publish() was
// handed the raw (possibly undefined) `github.branch`.
const branch = github.branch || 'gh-pages'

// Prefer an HTTPS remote with the CI token; fall back to SSH for local use.
let remoteURL = ''
if (token) {
  remoteURL = `https://${token}@github.com/${github.username}/${github.repo}.git`
  // Deliberately print a placeholder instead of the real token.
  console.log(`${chalk.blue('info')} All files will be Published in https://YOUR_TOKEN@github.com/${github.username}/${github.repo}.git`)
} else {
  remoteURL = `git@github.com:${github.username}/${github.repo}.git`
  console.log(`${chalk.blue('info')} All files will be Published in git@github.com:${github.username}/${github.repo}.git`)
}
console.log(`${chalk.blue('info')} Branch: ${branch}`)

// Push the built site in ../public to the configured branch of the remote.
ghpages.publish(path.resolve(__dirname, '../public'), {
  branch,
  repo: remoteURL,
  message: `:sparkles: Site updated at ${date}`,
}, (err) => {
  if (err) {
    console.error(err)
    process.exit(1)
  }
  console.log(`${chalk.green('success')} Published.`)
  console.log(`${chalk.green('success')} Visit https://github.com/${github.username}/${github.repo} to get more details.`)
})
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
__all__ = [
'validate_rbac_is_correctly_configured'
]
def validate_rbac_is_correctly_configured():
    """Fail fast when RBAC is enabled but authentication is not.

    :raises ValueError: if ``rbac.enable`` is set while ``auth.enable`` is not.
    :return: True when the configuration is consistent.
    """
    rbac_enabled = cfg.CONF.rbac.enable
    auth_enabled = cfg.CONF.auth.enable
    if rbac_enabled and not auth_enabled:
        msg = ('Authentication is not enabled. RBAC only works when authentication is enabled. '
               'You can either enable authentication or disable RBAC.')
        raise ValueError(msg)
    return True
|
# -*- coding: utf-8 -*-
"""Runtime settings for the Yo reminders service, read from the environment."""
import os

# URL of a public Google spreadsheet (CSV export) holding all reminder texts.
CSV_URL = 'https://docs.google.com/spreadsheets/d/1rhZRohjtg3-yVXXbcvTcCgep93pCxbstJR-9gZe5XNU/pub?output=csv'

# Yo API token for the account sending reminders (https://dev.justyo.co).
# None when the environment variable is unset.
YO_API_TOKEN = os.environ.get('YO_API_TOKEN')

# MongoDB connection string used to persist user state; None when unset.
MONGO_STRING = os.environ.get('MONGO_STRING')
|
from typing import Any, Dict, Optional, Union, cast
import httpx
from ...client import Client
from ...models.get_invoice_discounts_by_id_response_200 import (
GetInvoiceDiscountsByIdResponse200,
)
from ...types import Response
def _get_kwargs(
document_id: int,
*,
client: Client,
) -> Dict[str, Any]:
url = "{}/Invoice/{DocumentId}/getDiscounts".format(
client.base_url, DocumentId=document_id
)
headers: Dict[str, str] = client.get_headers()
cookies: Dict[str, Any] = client.get_cookies()
return {
"method": "get",
"url": url,
"headers": headers,
"cookies": cookies,
"timeout": client.get_timeout(),
}
def _parse_response(
*, response: httpx.Response
) -> Optional[Union[Any, GetInvoiceDiscountsByIdResponse200]]:
if response.status_code == 200:
response_200 = GetInvoiceDiscountsByIdResponse200.from_dict(response.json())
return response_200
if response.status_code == 400:
response_400 = cast(Any, None)
return response_400
if response.status_code == 401:
response_401 = cast(Any, None)
return response_401
if response.status_code == 500:
response_500 = cast(Any, None)
return response_500
return None
def _build_response(
    *, response: httpx.Response
) -> Response[Union[Any, GetInvoiceDiscountsByIdResponse200]]:
    """Wrap the raw httpx response (and its parsed body) in the generated container."""
    parsed_body = _parse_response(response=response)
    return Response(
        status_code=response.status_code,
        content=response.content,
        headers=response.headers,
        parsed=parsed_body,
    )
def sync_detailed(
    document_id: int,
    *,
    client: Client,
) -> Response[Union[Any, GetInvoiceDiscountsByIdResponse200]]:
    """Get discounts by id (blocking), returning the full Response wrapper.

    Args:
        document_id (int): document to fetch discounts for.

    Returns:
        Response[Union[Any, GetInvoiceDiscountsByIdResponse200]]
    """
    request_kwargs = _get_kwargs(
        document_id=document_id,
        client=client,
    )
    raw = httpx.request(
        verify=client.verify_ssl,
        **request_kwargs,
    )
    return _build_response(response=raw)
def sync(
    document_id: int,
    *,
    client: Client,
) -> Optional[Union[Any, GetInvoiceDiscountsByIdResponse200]]:
    """Get discounts by id (blocking), returning only the parsed body.

    Args:
        document_id (int): document to fetch discounts for.

    Returns:
        The parsed payload on success, None for error statuses.
    """
    detailed = sync_detailed(
        document_id=document_id,
        client=client,
    )
    return detailed.parsed
async def asyncio_detailed(
    document_id: int,
    *,
    client: Client,
) -> Response[Union[Any, GetInvoiceDiscountsByIdResponse200]]:
    """Get discounts by id (async), returning the full Response wrapper.

    Args:
        document_id (int): document to fetch discounts for.

    Returns:
        Response[Union[Any, GetInvoiceDiscountsByIdResponse200]]
    """
    request_kwargs = _get_kwargs(
        document_id=document_id,
        client=client,
    )
    async with httpx.AsyncClient(verify=client.verify_ssl) as session:
        raw = await session.request(**request_kwargs)
    return _build_response(response=raw)
async def asyncio(
    document_id: int,
    *,
    client: Client,
) -> Optional[Union[Any, GetInvoiceDiscountsByIdResponse200]]:
    """Get discounts by id (async), returning only the parsed body.

    Args:
        document_id (int): document to fetch discounts for.

    Returns:
        The parsed payload on success, None for error statuses.
    """
    detailed = await asyncio_detailed(
        document_id=document_id,
        client=client,
    )
    return detailed.parsed
|
// Barrel module: re-exports the individual test-helper modules as one object.
const { detectNetworkName } = require('./detectNetwork');
const { connectContract, connectContracts } = require('./connectContract');
const {
    knownMainnetWallet,
    ensureAccountHasEther,
    ensureAccountHasSNX,
    ensureAccountHassUSD,
} = require('./ensureAccountHasBalance');
const { exchangeSynths } = require('./exchangeSynths');
const { readSetting, writeSetting } = require('./systemSettings');
const { skipWaitingPeriod, skipStakeTime } = require('./skipWaiting');
const { simulateExchangeRates } = require('./exchangeRates');
const { takeDebtSnapshot } = require('./debtSnapshot');
const { mockOptimismBridge } = require('./optimismBridge');
const { implementsVirtualSynths } = require('./virtualSynths');

module.exports = {
    knownMainnetWallet,
    detectNetworkName,
    connectContract,
    connectContracts,
    ensureAccountHasEther,
    ensureAccountHassUSD,
    ensureAccountHasSNX,
    exchangeSynths,
    readSetting,
    writeSetting,
    skipWaitingPeriod,
    skipStakeTime,
    simulateExchangeRates,
    takeDebtSnapshot,
    mockOptimismBridge,
    implementsVirtualSynths,
};
|
!(function(e, t) {
"object" == typeof exports && "undefined" != typeof module
? (module.exports = t())
: "function" == typeof define && define.amd
? define(t)
: (e.Sweetalert2 = t());
})(this, function() {
"use strict";
function q(e) {
return (q =
"function" == typeof Symbol && "symbol" == typeof Symbol.iterator
? function(e) {
return typeof e;
}
: function(e) {
return e &&
"function" == typeof Symbol &&
e.constructor === Symbol &&
e !== Symbol.prototype
? "symbol"
: typeof e;
})(e);
}
function a(e, t) {
if (!(e instanceof t))
throw new TypeError("Cannot call a class as a function");
}
function o(e, t) {
for (var n = 0; n < t.length; n++) {
var o = t[n];
(o.enumerable = o.enumerable || !1),
(o.configurable = !0),
"value" in o && (o.writable = !0),
Object.defineProperty(e, o.key, o);
}
}
function i(e, t, n) {
return t && o(e.prototype, t), n && o(e, n), e;
}
function r() {
return (r =
Object.assign ||
function(e) {
for (var t = 1; t < arguments.length; t++) {
var n = arguments[t];
for (var o in n)
Object.prototype.hasOwnProperty.call(n, o) &&
(e[o] = n[o]);
}
return e;
}).apply(this, arguments);
}
function s(e, t) {
if ("function" != typeof t && null !== t)
throw new TypeError(
"Super expression must either be null or a function"
);
(e.prototype = Object.create(t && t.prototype, {
constructor: { value: e, writable: !0, configurable: !0 }
})),
t && u(e, t);
}
function c(e) {
return (c = Object.setPrototypeOf
? Object.getPrototypeOf
: function(e) {
return e.__proto__ || Object.getPrototypeOf(e);
})(e);
}
function u(e, t) {
return (u =
Object.setPrototypeOf ||
function(e, t) {
return (e.__proto__ = t), e;
})(e, t);
}
function l(e, t, n) {
return (l = (function() {
if ("undefined" == typeof Reflect || !Reflect.construct) return !1;
if (Reflect.construct.sham) return !1;
if ("function" == typeof Proxy) return !0;
try {
return (
Date.prototype.toString.call(
Reflect.construct(Date, [], function() {})
),
!0
);
} catch (e) {
return !1;
}
})()
? Reflect.construct
: function(e, t, n) {
var o = [null];
o.push.apply(o, t);
var i = new (Function.bind.apply(e, o))();
return n && u(i, n.prototype), i;
}).apply(null, arguments);
}
function d(e, t) {
return !t || ("object" != typeof t && "function" != typeof t)
? (function(e) {
if (void 0 === e)
throw new ReferenceError(
"this hasn't been initialised - super() hasn't been called"
);
return e;
})(e)
: t;
}
function p(e, t, n) {
return (p =
"undefined" != typeof Reflect && Reflect.get
? Reflect.get
: function(e, t, n) {
var o = (function(e, t) {
for (
;
!Object.prototype.hasOwnProperty.call(e, t) &&
null !== (e = c(e));
);
return e;
})(e, t);
if (o) {
var i = Object.getOwnPropertyDescriptor(o, t);
return i.get ? i.get.call(n) : i.value;
}
})(e, t, n || e);
}
var t = "SweetAlert2:",
f = function(e) {
return Array.prototype.slice.call(e);
},
R = function(e) {
console.warn("".concat(t, " ").concat(e));
},
I = function(e) {
console.error("".concat(t, " ").concat(e));
},
n = [],
m = function(e) {
-1 === n.indexOf(e) && (n.push(e), R(e));
},
H = function(e) {
return "function" == typeof e ? e() : e;
},
D = function(e) {
return e && Promise.resolve(e) === e;
},
e = Object.freeze({
cancel: "cancel",
backdrop: "overlay",
close: "close",
esc: "esc",
timer: "timer"
}),
h = function(e) {
var t = {};
for (var n in e) t[e[n]] = "swal2-" + e[n];
return t;
},
_ = h([
"container",
"shown",
"height-auto",
"iosfix",
"popup",
"modal",
"no-backdrop",
"toast",
"toast-shown",
"toast-column",
"fade",
"show",
"hide",
"noanimation",
"close",
"title",
"header",
"content",
"actions",
"confirm",
"cancel",
"footer",
"icon",
"icon-text",
"image",
"input",
"file",
"range",
"select",
"radio",
"checkbox",
"label",
"textarea",
"inputerror",
"validation-message",
"progresssteps",
"activeprogressstep",
"progresscircle",
"progressline",
"loading",
"styled",
"top",
"top-start",
"top-end",
"top-left",
"top-right",
"center",
"center-start",
"center-end",
"center-left",
"center-right",
"bottom",
"bottom-start",
"bottom-end",
"bottom-left",
"bottom-right",
"grow-row",
"grow-column",
"grow-fullscreen",
"rtl"
]),
g = h(["success", "warning", "info", "question", "error"]),
b = { previousBodyPadding: null },
v = function(e, t) {
return e.classList.contains(t);
},
N = function(e) {
if ((e.focus(), "file" !== e.type)) {
var t = e.value;
(e.value = ""), (e.value = t);
}
},
y = function(e, t, n) {
e &&
t &&
("string" == typeof t && (t = t.split(/\s+/).filter(Boolean)),
t.forEach(function(t) {
e.forEach
? e.forEach(function(e) {
n ? e.classList.add(t) : e.classList.remove(t);
})
: n
? e.classList.add(t)
: e.classList.remove(t);
}));
},
z = function(e, t) {
y(e, t, !0);
},
W = function(e, t) {
y(e, t, !1);
},
U = function(e, t) {
for (var n = 0; n < e.childNodes.length; n++)
if (v(e.childNodes[n], t)) return e.childNodes[n];
},
K = function(e) {
(e.style.opacity = ""),
(e.style.display = e.id === _.content ? "block" : "flex");
},
F = function(e) {
(e.style.opacity = ""), (e.style.display = "none");
},
Z = function(e) {
return (
e &&
(e.offsetWidth || e.offsetHeight || e.getClientRects().length)
);
},
w = function() {
return document.body.querySelector("." + _.container);
},
C = function(e) {
var t = w();
return t ? t.querySelector("." + e) : null;
},
k = function() {
return C(_.popup);
},
x = function() {
var e = k();
return f(e.querySelectorAll("." + _.icon));
},
A = function() {
return C(_.title);
},
B = function() {
return C(_.content);
},
S = function() {
return C(_.image);
},
P = function() {
return C(_.progresssteps);
},
E = function() {
return C(_["validation-message"]);
},
L = function() {
return C(_.confirm);
},
O = function() {
return C(_.cancel);
},
Q = function() {
return C(_.actions);
},
Y = function() {
return C(_.footer);
},
$ = function() {
return C(_.close);
},
J = function() {
var e = f(
k().querySelectorAll(
'[tabindex]:not([tabindex="-1"]):not([tabindex="0"])'
)
).sort(function(e, t) {
return (
(e = parseInt(e.getAttribute("tabindex"))),
(t = parseInt(t.getAttribute("tabindex"))) < e
? 1
: e < t
? -1
: 0
);
}),
t = f(
k().querySelectorAll(
'a[href], area[href], input:not([disabled]), select:not([disabled]), textarea:not([disabled]), button:not([disabled]), iframe, object, embed, [tabindex="0"], [contenteditable], audio[controls], video[controls]'
)
).filter(function(e) {
return "-1" !== e.getAttribute("tabindex");
});
return (function(e) {
for (var t = [], n = 0; n < e.length; n++)
-1 === t.indexOf(e[n]) && t.push(e[n]);
return t;
})(e.concat(t)).filter(function(e) {
return Z(e);
});
},
T = function() {
return !M() && !document.body.classList.contains(_["no-backdrop"]);
},
M = function() {
return document.body.classList.contains(_["toast-shown"]);
},
j = function() {
return (
"undefined" == typeof window || "undefined" == typeof document
);
},
V = '\n <div aria-labelledby="'
.concat(_.title, '" aria-describedby="')
.concat(_.content, '" class="')
.concat(_.popup, '" tabindex="-1">\n <div class="')
.concat(_.header, '">\n <ul class="')
.concat(_.progresssteps, '"></ul>\n <div class="')
.concat(_.icon, " ")
.concat(
g.error,
'">\n <span class="swal2-x-mark"><span class="swal2-x-mark-line-left"></span><span class="swal2-x-mark-line-right"></span></span>\n </div>\n <div class="'
)
.concat(_.icon, " ")
.concat(g.question, '">\n <span class="')
.concat(
_["icon-text"],
'">?</span>\n </div>\n <div class="'
)
.concat(_.icon, " ")
.concat(g.warning, '">\n <span class="')
.concat(
_["icon-text"],
'">!</span>\n </div>\n <div class="'
)
.concat(_.icon, " ")
.concat(g.info, '">\n <span class="')
.concat(
_["icon-text"],
'">i</span>\n </div>\n <div class="'
)
.concat(_.icon, " ")
.concat(
g.success,
'">\n <div class="swal2-success-circular-line-left"></div>\n <span class="swal2-success-line-tip"></span> <span class="swal2-success-line-long"></span>\n <div class="swal2-success-ring"></div> <div class="swal2-success-fix"></div>\n <div class="swal2-success-circular-line-right"></div>\n </div>\n <img class="'
)
.concat(_.image, '" />\n <h2 class="')
.concat(_.title, '" id="')
.concat(_.title, '"></h2>\n <button type="button" class="')
.concat(_.close, '">×</button>\n </div>\n <div class="')
.concat(_.content, '">\n <div id="')
.concat(_.content, '"></div>\n <input class="')
.concat(_.input, '" />\n <input type="file" class="')
.concat(_.file, '" />\n <div class="')
.concat(
_.range,
'">\n <input type="range" />\n <output></output>\n </div>\n <select class="'
)
.concat(_.select, '"></select>\n <div class="')
.concat(_.radio, '"></div>\n <label for="')
.concat(_.checkbox, '" class="')
.concat(
_.checkbox,
'">\n <input type="checkbox" />\n <span class="'
)
.concat(_.label, '"></span>\n </label>\n <textarea class="')
.concat(_.textarea, '"></textarea>\n <div class="')
.concat(_["validation-message"], '" id="')
.concat(
_["validation-message"],
'"></div>\n </div>\n <div class="'
)
.concat(_.actions, '">\n <button type="button" class="')
.concat(
_.confirm,
'">OK</button>\n <button type="button" class="'
)
.concat(_.cancel, '">Cancel</button>\n </div>\n <div class="')
.concat(_.footer, '">\n </div>\n </div>\n')
.replace(/(^|\n)\s*/g, ""),
X = function(e) {
var t = w();
if (
(t &&
(t.parentNode.removeChild(t),
W(
[document.documentElement, document.body],
[_["no-backdrop"], _["toast-shown"], _["has-column"]]
)),
!j())
) {
var n = document.createElement("div");
(n.className = _.container), (n.innerHTML = V);
var o =
"string" == typeof e.target
? document.querySelector(e.target)
: e.target;
o.appendChild(n);
var i,
r = k(),
a = B(),
s = U(a, _.input),
c = U(a, _.file),
u = a.querySelector(".".concat(_.range, " input")),
l = a.querySelector(".".concat(_.range, " output")),
d = U(a, _.select),
p = a.querySelector(".".concat(_.checkbox, " input")),
f = U(a, _.textarea);
r.setAttribute("role", e.toast ? "alert" : "dialog"),
r.setAttribute(
"aria-live",
e.toast ? "polite" : "assertive"
),
e.toast || r.setAttribute("aria-modal", "true"),
"rtl" === window.getComputedStyle(o).direction &&
z(w(), _.rtl);
var m = function(e) {
De.isVisible() &&
i !== e.target.value &&
De.resetValidationMessage(),
(i = e.target.value);
};
return (
(s.oninput = m),
(c.onchange = m),
(d.onchange = m),
(p.onchange = m),
(f.oninput = m),
(u.oninput = function(e) {
m(e), (l.value = u.value);
}),
(u.onchange = function(e) {
m(e), (u.nextSibling.value = u.value);
}),
r
);
}
I("SweetAlert2 requires document to initialize");
},
G = function(e, t) {
if (!e) return F(t);
if (e instanceof HTMLElement) t.appendChild(e);
else if ("object" === q(e))
if (((t.innerHTML = ""), 0 in e))
for (var n = 0; n in e; n++)
t.appendChild(e[n].cloneNode(!0));
else t.appendChild(e.cloneNode(!0));
else e && (t.innerHTML = e);
K(t);
},
ee = (function() {
if (j()) return !1;
var e = document.createElement("div"),
t = {
WebkitAnimation: "webkitAnimationEnd",
OAnimation: "oAnimationEnd oanimationend",
animation: "animationend"
};
for (var n in t)
if (t.hasOwnProperty(n) && void 0 !== e.style[n]) return t[n];
return !1;
})(),
te = function(e) {
var t = Q(),
n = L(),
o = O();
if (
(e.showConfirmButton || e.showCancelButton ? K(t) : F(t),
e.showCancelButton ? (o.style.display = "inline-block") : F(o),
e.showConfirmButton ? n.style.removeProperty("display") : F(n),
(n.innerHTML = e.confirmButtonText),
(o.innerHTML = e.cancelButtonText),
n.setAttribute("aria-label", e.confirmButtonAriaLabel),
o.setAttribute("aria-label", e.cancelButtonAriaLabel),
(n.className = _.confirm),
z(n, e.confirmButtonClass),
(o.className = _.cancel),
z(o, e.cancelButtonClass),
e.buttonsStyling)
) {
z([n, o], _.styled),
e.confirmButtonColor &&
(n.style.backgroundColor = e.confirmButtonColor),
e.cancelButtonColor &&
(o.style.backgroundColor = e.cancelButtonColor);
var i = window
.getComputedStyle(n)
.getPropertyValue("background-color");
(n.style.borderLeftColor = i), (n.style.borderRightColor = i);
} else
W([n, o], _.styled),
(n.style.backgroundColor = n.style.borderLeftColor = n.style.borderRightColor =
""),
(o.style.backgroundColor = o.style.borderLeftColor = o.style.borderRightColor =
"");
},
ne = function(e) {
var t = B().querySelector("#" + _.content);
e.html
? G(e.html, t)
: e.text
? ((t.textContent = e.text), K(t))
: F(t);
},
oe = function(e) {
for (var t = x(), n = 0; n < t.length; n++) F(t[n]);
if (e.type)
if (-1 !== Object.keys(g).indexOf(e.type)) {
var o = De.getPopup().querySelector(
".".concat(_.icon, ".").concat(g[e.type])
);
K(o),
e.animation &&
z(o, "swal2-animate-".concat(e.type, "-icon"));
} else
I(
'Unknown type! Expected "success", "error", "warning", "info" or "question", got "'.concat(
e.type,
'"'
)
);
},
ie = function(e) {
var t = S();
e.imageUrl
? (t.setAttribute("src", e.imageUrl),
t.setAttribute("alt", e.imageAlt),
K(t),
e.imageWidth
? t.setAttribute("width", e.imageWidth)
: t.removeAttribute("width"),
e.imageHeight
? t.setAttribute("height", e.imageHeight)
: t.removeAttribute("height"),
(t.className = _.image),
e.imageClass && z(t, e.imageClass))
: F(t);
},
re = function(i) {
var r = P(),
a = parseInt(
null === i.currentProgressStep
? De.getQueueStep()
: i.currentProgressStep,
10
);
i.progressSteps && i.progressSteps.length
? (K(r),
(r.innerHTML = ""),
a >= i.progressSteps.length &&
R(
"Invalid currentProgressStep parameter, it should be less than progressSteps.length (currentProgressStep like JS arrays starts from 0)"
),
i.progressSteps.forEach(function(e, t) {
var n = document.createElement("li");
if (
(z(n, _.progresscircle),
(n.innerHTML = e),
t === a && z(n, _.activeprogressstep),
r.appendChild(n),
t !== i.progressSteps.length - 1)
) {
var o = document.createElement("li");
z(o, _.progressline),
i.progressStepsDistance &&
(o.style.width = i.progressStepsDistance),
r.appendChild(o);
}
}))
: F(r);
},
ae = function(e) {
var t = A();
e.titleText
? (t.innerText = e.titleText)
: e.title &&
("string" == typeof e.title &&
(e.title = e.title.split("\n").join("<br />")),
G(e.title, t));
},
se = function() {
null === b.previousBodyPadding &&
document.body.scrollHeight > window.innerHeight &&
((b.previousBodyPadding = parseInt(
window
.getComputedStyle(document.body)
.getPropertyValue("padding-right")
)),
(document.body.style.paddingRight =
b.previousBodyPadding +
(function() {
if (
"ontouchstart" in window ||
navigator.msMaxTouchPoints
)
return 0;
var e = document.createElement("div");
(e.style.width = "50px"),
(e.style.height = "50px"),
(e.style.overflow = "scroll"),
document.body.appendChild(e);
var t = e.offsetWidth - e.clientWidth;
return document.body.removeChild(e), t;
})() +
"px"));
},
ce = function() {
return !!window.MSInputMethodContext && !!document.documentMode;
},
ue = function() {
var e = w(),
t = k();
e.style.removeProperty("align-items"),
t.offsetTop < 0 && (e.style.alignItems = "flex-start");
},
le = {},
de = function(e, t) {
var n = w(),
o = k();
if (o) {
null !== e && "function" == typeof e && e(o),
W(o, _.show),
z(o, _.hide);
var i = function() {
M()
? pe(t)
: (new Promise(function(e) {
var t = window.scrollX,
n = window.scrollY;
(le.restoreFocusTimeout = setTimeout(function() {
le.previousActiveElement &&
le.previousActiveElement.focus
? (le.previousActiveElement.focus(),
(le.previousActiveElement = null))
: document.body && document.body.focus(),
e();
}, 100)),
void 0 !== t &&
void 0 !== n &&
window.scrollTo(t, n);
}).then(function() {
return pe(t);
}),
le.keydownTarget.removeEventListener(
"keydown",
le.keydownHandler,
{ capture: le.keydownListenerCapture }
),
(le.keydownHandlerAdded = !1)),
n.parentNode && n.parentNode.removeChild(n),
W(
[document.documentElement, document.body],
[
_.shown,
_["height-auto"],
_["no-backdrop"],
_["toast-shown"],
_["toast-column"]
]
),
T() &&
(null !== b.previousBodyPadding &&
((document.body.style.paddingRight =
b.previousBodyPadding),
(b.previousBodyPadding = null)),
(function() {
if (v(document.body, _.iosfix)) {
var e = parseInt(
document.body.style.top,
10
);
W(document.body, _.iosfix),
(document.body.style.top = ""),
(document.body.scrollTop = -1 * e);
}
})(),
"undefined" != typeof window &&
ce() &&
window.removeEventListener("resize", ue),
f(document.body.children).forEach(function(e) {
e.hasAttribute("data-previous-aria-hidden")
? (e.setAttribute(
"aria-hidden",
e.getAttribute(
"data-previous-aria-hidden"
)
),
e.removeAttribute(
"data-previous-aria-hidden"
))
: e.removeAttribute("aria-hidden");
}));
};
ee && !v(o, _.noanimation)
? o.addEventListener(ee, function e() {
o.removeEventListener(ee, e), v(o, _.hide) && i();
})
: i();
}
},
pe = function(e) {
null !== e &&
"function" == typeof e &&
setTimeout(function() {
e();
});
};
function fe(e) {
var t = function e() {
for (var t = arguments.length, n = new Array(t), o = 0; o < t; o++)
n[o] = arguments[o];
if (!(this instanceof e)) return l(e, n);
Object.getPrototypeOf(e).apply(this, n);
};
return (
(t.prototype = r(Object.create(e.prototype), { constructor: t })),
"function" == typeof Object.setPrototypeOf
? Object.setPrototypeOf(t, e)
: (t.__proto__ = e),
t
);
}
var me = {
title: "",
titleText: "",
text: "",
html: "",
footer: "",
type: null,
toast: !1,
customClass: "",
customContainerClass: "",
target: "body",
backdrop: !0,
animation: !0,
heightAuto: !0,
allowOutsideClick: !0,
allowEscapeKey: !0,
allowEnterKey: !0,
stopKeydownPropagation: !0,
keydownListenerCapture: !1,
showConfirmButton: !0,
showCancelButton: !1,
preConfirm: null,
confirmButtonText: "OK",
confirmButtonAriaLabel: "",
confirmButtonColor: null,
confirmButtonClass: null,
cancelButtonText: "Cancel",
cancelButtonAriaLabel: "",
cancelButtonColor: null,
cancelButtonClass: null,
buttonsStyling: !0,
reverseButtons: !1,
focusConfirm: !0,
focusCancel: !1,
showCloseButton: !1,
closeButtonAriaLabel: "Close this dialog",
showLoaderOnConfirm: !1,
imageUrl: null,
imageWidth: null,
imageHeight: null,
imageAlt: "",
imageClass: null,
timer: null,
width: null,
padding: null,
background: null,
input: null,
inputPlaceholder: "",
inputValue: "",
inputOptions: {},
inputAutoTrim: !0,
inputClass: null,
inputAttributes: {},
inputValidator: null,
validationMessage: null,
grow: !1,
position: "center",
progressSteps: [],
currentProgressStep: null,
progressStepsDistance: null,
onBeforeOpen: null,
onAfterClose: null,
onOpen: null,
onClose: null,
useRejections: !1,
expectRejections: !1
},
he = ["useRejections", "expectRejections", "extraParams"],
ge = [
"allowOutsideClick",
"allowEnterKey",
"backdrop",
"focusConfirm",
"focusCancel",
"heightAuto",
"keydownListenerCapture"
],
            be = function(e) {
                // A parameter name is valid if it is one of the documented
                // defaults (`me`) or the deprecated catch-all "extraParams".
                return me.hasOwnProperty(e) || "extraParams" === e;
            },
            ve = function(e) {
                // True when the parameter is on the deprecation list `he`.
                return -1 !== he.indexOf(e);
            },
ye = function(e) {
for (var t in e)
be(t) || R('Unknown parameter "'.concat(t, '"')),
e.toast &&
-1 !== ge.indexOf(t) &&
R(
'The parameter "'.concat(
t,
'" is incompatible with toasts'
)
),
ve(t) &&
m(
'The parameter "'.concat(
t,
'" is deprecated and will be removed in the next major release.'
)
);
},
we =
'"setDefaults" & "resetDefaults" methods are deprecated in favor of "mixin" method and will be removed in the next major release. For new projects, use "mixin". For past projects already using "setDefaults", support will be provided through an additional package.',
Ce = {};
var ke = [],
xe = function() {
var e = k();
e || De(""), (e = k());
var t = Q(),
n = L(),
o = O();
K(t),
K(n),
z([e, t], _.loading),
(n.disabled = !0),
(o.disabled = !0),
e.setAttribute("data-loading", !0),
e.setAttribute("aria-busy", !0),
e.focus();
},
Ae = Object.freeze({
isValidParameter: be,
isDeprecatedParameter: ve,
argsToParams: function(n) {
var o = {};
switch (q(n[0])) {
case "object":
r(o, n[0]);
break;
default:
["title", "html", "type"].forEach(function(e, t) {
switch (q(n[t])) {
case "string":
o[e] = n[t];
break;
case "undefined":
break;
default:
I(
"Unexpected type of "
.concat(
e,
'! Expected "string", got '
)
.concat(q(n[t]))
);
}
});
}
return o;
},
adaptInputValidator: function(n) {
return function(e, t) {
return n.call(this, e, t).then(
function() {},
function(e) {
return e;
}
);
};
},
close: de,
closePopup: de,
closeModal: de,
closeToast: de,
isVisible: function() {
return !!k();
},
clickConfirm: function() {
return L().click();
},
clickCancel: function() {
return O().click();
},
getContainer: w,
getPopup: k,
getTitle: A,
getContent: B,
getImage: S,
getIcons: x,
getCloseButton: $,
getButtonsWrapper: function() {
return (
m(
"swal.getButtonsWrapper() is deprecated and will be removed in the next major release, use swal.getActions() instead"
),
C(_.actions)
);
},
getActions: Q,
getConfirmButton: L,
getCancelButton: O,
getFooter: Y,
getFocusableElements: J,
getValidationMessage: E,
isLoading: function() {
return k().hasAttribute("data-loading");
},
fire: function() {
for (
var e = arguments.length, t = new Array(e), n = 0;
n < e;
n++
)
t[n] = arguments[n];
return l(this, t);
},
mixin: function(n) {
return fe(
(function(e) {
function t() {
return (
a(this, t), d(this, c(t).apply(this, arguments))
);
}
return (
s(t, e),
i(t, [
{
key: "_main",
value: function(e) {
return p(
c(t.prototype),
"_main",
this
).call(this, r({}, n, e));
}
}
]),
t
);
})(this)
);
},
queue: function(e) {
var r = this;
ke = e;
var a = function() {
(ke = []),
document.body.removeAttribute(
"data-swal2-queue-step"
);
},
s = [];
return new Promise(function(i) {
!(function t(n, o) {
n < ke.length
? (document.body.setAttribute(
"data-swal2-queue-step",
n
),
r(ke[n]).then(function(e) {
void 0 !== e.value
? (s.push(e.value), t(n + 1, o))
: (a(), i({ dismiss: e.dismiss }));
}))
: (a(), i({ value: s }));
})(0);
});
},
getQueueStep: function() {
return document.body.getAttribute("data-swal2-queue-step");
},
insertQueueStep: function(e, t) {
return t && t < ke.length ? ke.splice(t, 0, e) : ke.push(e);
},
deleteQueueStep: function(e) {
void 0 !== ke[e] && ke.splice(e, 1);
},
showLoading: xe,
enableLoading: xe,
getTimerLeft: function() {
return le.timeout && le.timeout.getTimerLeft();
},
stopTimer: function() {
return le.timeout && le.timeout.stop();
},
resumeTimer: function() {
return le.timeout && le.timeout.start();
},
toggleTimer: function() {
var e = le.timeout;
return e && (e.running ? e.stop() : e.start());
},
increaseTimer: function(e) {
return le.timeout && le.timeout.increase(e);
},
isTimerRunning: function() {
return le.timeout && le.timeout.isRunning();
}
}),
Be =
"function" == typeof Symbol
? Symbol
: (function() {
var t = 0;
function e(e) {
return (
"__" +
e +
"_" +
Math.floor(1e9 * Math.random()) +
"_" +
++t +
"__"
);
}
return (e.iterator = e("Symbol.iterator")), e;
})(),
Se =
"function" == typeof WeakMap
? WeakMap
: (function(n, o, t) {
function e() {
o(this, n, { value: Be("WeakMap") });
}
return (
(e.prototype = {
delete: function(e) {
delete e[this[n]];
},
get: function(e) {
return e[this[n]];
},
has: function(e) {
return t.call(e, this[n]);
},
set: function(e, t) {
o(e, this[n], { configurable: !0, value: t });
}
}),
e
);
})(Be("WeakMap"), Object.defineProperty, {}.hasOwnProperty),
Pe = { promise: new Se(), innerParams: new Se(), domCache: new Se() };
function Ee() {
var e = Pe.innerParams.get(this),
t = Pe.domCache.get(this);
e.showConfirmButton ||
(F(t.confirmButton), e.showCancelButton || F(t.actions)),
W([t.popup, t.actions], _.loading),
t.popup.removeAttribute("aria-busy"),
t.popup.removeAttribute("data-loading"),
(t.confirmButton.disabled = !1),
(t.cancelButton.disabled = !1);
}
function Le(e) {
var t = Pe.domCache.get(this);
t.validationMessage.innerHTML = e;
var n = window.getComputedStyle(t.popup);
(t.validationMessage.style.marginLeft = "-".concat(
n.getPropertyValue("padding-left")
)),
(t.validationMessage.style.marginRight = "-".concat(
n.getPropertyValue("padding-right")
)),
K(t.validationMessage);
var o = this.getInput();
o &&
(o.setAttribute("aria-invalid", !0),
o.setAttribute("aria-describedBy", _["validation-message"]),
N(o),
z(o, _.inputerror));
}
function Oe() {
var e = Pe.domCache.get(this);
e.validationMessage && F(e.validationMessage);
var t = this.getInput();
t &&
(t.removeAttribute("aria-invalid"),
t.removeAttribute("aria-describedBy"),
W(t, _.inputerror));
}
        var Te = function e(t, n) {
                // Pausable countdown backing the `timer` option: runs callback
                // `t` after `n` ms. stop() banks the remaining time in `r` so
                // start() can resume; increase() adds to the remainder.
                a(this, e);
                var o, // pending setTimeout handle
                    i, // Date of the most recent start()
                    r = n; // milliseconds remaining
                (this.running = !1),
                    (this.start = function() {
                        return (
                            this.running ||
                                ((this.running = !0),
                                (i = new Date()),
                                (o = setTimeout(t, r))),
                            r
                        );
                    }),
                    (this.stop = function() {
                        return (
                            this.running &&
                                ((this.running = !1),
                                clearTimeout(o),
                                // bank the time elapsed since start()
                                (r -= new Date() - i)),
                            r
                        );
                    }),
                    (this.increase = function(e) {
                        var t = this.running;
                        return t && this.stop(), (r += e), t && this.start(), r;
                    }),
                    (this.getTimerLeft = function() {
                        // A stop/start round-trip refreshes `r` to the true remainder.
                        return this.running && (this.stop(), this.start()), r;
                    }),
                    (this.isRunning = function() {
                        return this.running;
                    }),
                    this.start();
            },
Me = {
email: function(e, t) {
return /^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9.-]+\.[a-zA-Z0-9-]{2,24}$/.test(
e
)
? Promise.resolve()
: Promise.reject(
t && t.validationMessage
? t.validationMessage
: "Invalid email address"
);
},
url: function(e, t) {
return /^https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{2,256}\.[a-z]{2,63}\b([-a-zA-Z0-9@:%_+.~#?&//=]*)$/.test(
e
)
? Promise.resolve()
: Promise.reject(
t && t.validationMessage
? t.validationMessage
: "Invalid URL"
);
}
};
var je = function(e) {
var t = w(),
n = k();
null !== e.onBeforeOpen &&
"function" == typeof e.onBeforeOpen &&
e.onBeforeOpen(n),
e.animation
? (z(n, _.show), z(t, _.fade), W(n, _.hide))
: W(n, _.fade),
K(n),
(t.style.overflowY = "hidden"),
ee && !v(n, _.noanimation)
? n.addEventListener(ee, function e() {
n.removeEventListener(ee, e),
(t.style.overflowY = "auto");
})
: (t.style.overflowY = "auto"),
z([document.documentElement, document.body, t], _.shown),
e.heightAuto &&
e.backdrop &&
!e.toast &&
z([document.documentElement, document.body], _["height-auto"]),
T() &&
(se(),
(function() {
if (
/iPad|iPhone|iPod/.test(navigator.userAgent) &&
!window.MSStream &&
!v(document.body, _.iosfix)
) {
var e = document.body.scrollTop;
(document.body.style.top = -1 * e + "px"),
z(document.body, _.iosfix);
}
})(),
"undefined" != typeof window &&
ce() &&
(ue(), window.addEventListener("resize", ue)),
f(document.body.children).forEach(function(e) {
e === w() ||
(function(e, t) {
if ("function" == typeof e.contains)
return e.contains(t);
})(e, w()) ||
(e.hasAttribute("aria-hidden") &&
e.setAttribute(
"data-previous-aria-hidden",
e.getAttribute("aria-hidden")
),
e.setAttribute("aria-hidden", "true"));
}),
setTimeout(function() {
t.scrollTop = 0;
})),
M() ||
le.previousActiveElement ||
(le.previousActiveElement = document.activeElement),
null !== e.onOpen &&
"function" == typeof e.onOpen &&
setTimeout(function() {
e.onOpen(n);
});
};
var Ve,
qe = Object.freeze({
hideLoading: Ee,
disableLoading: Ee,
getInput: function(e) {
var t = Pe.innerParams.get(this),
n = Pe.domCache.get(this);
if (!(e = e || t.input)) return null;
switch (e) {
case "select":
case "textarea":
case "file":
return U(n.content, _[e]);
case "checkbox":
return n.popup.querySelector(
".".concat(_.checkbox, " input")
);
case "radio":
return (
n.popup.querySelector(
".".concat(_.radio, " input:checked")
) ||
n.popup.querySelector(
".".concat(_.radio, " input:first-child")
)
);
case "range":
return n.popup.querySelector(
".".concat(_.range, " input")
);
default:
return U(n.content, _.input);
}
},
enableButtons: function() {
var e = Pe.domCache.get(this);
(e.confirmButton.disabled = !1), (e.cancelButton.disabled = !1);
},
disableButtons: function() {
var e = Pe.domCache.get(this);
(e.confirmButton.disabled = !0), (e.cancelButton.disabled = !0);
},
enableConfirmButton: function() {
Pe.domCache.get(this).confirmButton.disabled = !1;
},
disableConfirmButton: function() {
Pe.domCache.get(this).confirmButton.disabled = !0;
},
enableInput: function() {
var e = this.getInput();
if (!e) return !1;
if ("radio" === e.type)
for (
var t = e.parentNode.parentNode.querySelectorAll(
"input"
),
n = 0;
n < t.length;
n++
)
t[n].disabled = !1;
else e.disabled = !1;
},
disableInput: function() {
var e = this.getInput();
if (!e) return !1;
if (e && "radio" === e.type)
for (
var t = e.parentNode.parentNode.querySelectorAll(
"input"
),
n = 0;
n < t.length;
n++
)
t[n].disabled = !0;
else e.disabled = !0;
},
showValidationMessage: Le,
resetValidationMessage: Oe,
resetValidationError: function() {
m(
"Swal.resetValidationError() is deprecated and will be removed in the next major release, use Swal.resetValidationMessage() instead"
),
Oe.bind(this)();
},
showValidationError: function(e) {
m(
"Swal.showValidationError() is deprecated and will be removed in the next major release, use Swal.showValidationMessage() instead"
),
Le.bind(this)(e);
},
getProgressSteps: function() {
return Pe.innerParams.get(this).progressSteps;
},
setProgressSteps: function(e) {
var t = r({}, Pe.innerParams.get(this), { progressSteps: e });
Pe.innerParams.set(this, t), re(t);
},
showProgressSteps: function() {
var e = Pe.domCache.get(this);
K(e.progressSteps);
},
hideProgressSteps: function() {
var e = Pe.domCache.get(this);
F(e.progressSteps);
},
_main: function(e) {
var T = this;
ye(e);
var M = r({}, me, e);
!(function(t) {
var e;
t.inputValidator ||
Object.keys(Me).forEach(function(e) {
t.input === e &&
(t.inputValidator = t.expectRejections
? Me[e]
: De.adaptInputValidator(Me[e]));
}),
t.validationMessage &&
("object" !== q(t.extraParams) &&
(t.extraParams = {}),
(t.extraParams.validationMessage =
t.validationMessage)),
(!t.target ||
("string" == typeof t.target &&
!document.querySelector(t.target)) ||
("string" != typeof t.target &&
!t.target.appendChild)) &&
(R(
'Target parameter is not valid, defaulting to "body"'
),
(t.target = "body")),
"function" == typeof t.animation &&
(t.animation = t.animation.call());
var n = k(),
o =
"string" == typeof t.target
? document.querySelector(t.target)
: t.target;
(e =
n && o && n.parentNode !== o.parentNode
? X(t)
: n || X(t)),
t.width &&
(e.style.width =
"number" == typeof t.width
? t.width + "px"
: t.width),
t.padding &&
(e.style.padding =
"number" == typeof t.padding
? t.padding + "px"
: t.padding),
t.background && (e.style.background = t.background);
for (
var i = window
.getComputedStyle(e)
.getPropertyValue("background-color"),
r = e.querySelectorAll(
"[class^=swal2-success-circular-line], .swal2-success-fix"
),
a = 0;
a < r.length;
a++
)
r[a].style.backgroundColor = i;
var s = w(),
c = $(),
u = Y();
if (
(ae(t),
ne(t),
"string" == typeof t.backdrop
? (w().style.background = t.backdrop)
: t.backdrop ||
z(
[document.documentElement, document.body],
_["no-backdrop"]
),
!t.backdrop &&
t.allowOutsideClick &&
R(
'"allowOutsideClick" parameter requires `backdrop` parameter to be set to `true`'
),
t.position in _
? z(s, _[t.position])
: (R(
'The "position" parameter is not valid, defaulting to "center"'
),
z(s, _.center)),
t.grow && "string" == typeof t.grow)
) {
var l = "grow-" + t.grow;
l in _ && z(s, _[l]);
}
t.showCloseButton
? (c.setAttribute("aria-label", t.closeButtonAriaLabel),
K(c))
: F(c),
(e.className = _.popup),
t.toast
? (z(
[document.documentElement, document.body],
_["toast-shown"]
),
z(e, _.toast))
: z(e, _.modal),
t.customClass && z(e, t.customClass),
t.customContainerClass && z(s, t.customContainerClass),
re(t),
oe(t),
ie(t),
te(t),
G(t.footer, u),
!0 === t.animation
? W(e, _.noanimation)
: z(e, _.noanimation),
t.showLoaderOnConfirm &&
!t.preConfirm &&
R(
"showLoaderOnConfirm is set to true, but preConfirm is not defined.\nshowLoaderOnConfirm should be used together with preConfirm, see usage example:\nhttps://sweetalert2.github.io/#ajax-request"
);
})(M),
Object.freeze(M),
Pe.innerParams.set(this, M),
le.timeout && (le.timeout.stop(), delete le.timeout),
clearTimeout(le.restoreFocusTimeout);
var j = {
popup: k(),
container: w(),
content: B(),
actions: Q(),
confirmButton: L(),
cancelButton: O(),
closeButton: $(),
validationMessage: E(),
progressSteps: P()
};
Pe.domCache.set(this, j);
var V = this.constructor;
return new Promise(function(t, n) {
var o = function(e) {
V.closePopup(M.onClose, M.onAfterClose),
M.useRejections ? t(e) : t({ value: e });
},
c = function(e) {
V.closePopup(M.onClose, M.onAfterClose),
M.useRejections ? n(e) : t({ dismiss: e });
},
u = function(e) {
V.closePopup(M.onClose, M.onAfterClose), n(e);
};
M.timer &&
(le.timeout = new Te(function() {
c("timer"), delete le.timeout;
}, M.timer)),
M.input &&
setTimeout(function() {
var e = T.getInput();
e && N(e);
}, 0);
for (
var l = function(t) {
if (
(M.showLoaderOnConfirm && V.showLoading(),
M.preConfirm)
) {
T.resetValidationMessage();
var e = Promise.resolve().then(function() {
return M.preConfirm(t, M.extraParams);
});
M.expectRejections
? e.then(
function(e) {
return o(e || t);
},
function(e) {
T.hideLoading(),
e &&
T.showValidationMessage(
e
);
}
)
: e.then(
function(e) {
Z(j.validationMessage) ||
!1 === e
? T.hideLoading()
: o(e || t);
},
function(e) {
return u(e);
}
);
} else o(t);
},
e = function(e) {
var t = e.target,
n = j.confirmButton,
o = j.cancelButton,
i = n && (n === t || n.contains(t)),
r = o && (o === t || o.contains(t));
switch (e.type) {
case "click":
if (i && V.isVisible())
if ((T.disableButtons(), M.input)) {
var a = (function() {
var e = T.getInput();
if (!e) return null;
switch (M.input) {
case "checkbox":
return e.checked
? 1
: 0;
case "radio":
return e.checked
? e.value
: null;
case "file":
return e.files
.length
? e.files[0]
: null;
default:
return M.inputAutoTrim
? e.value.trim()
: e.value;
}
})();
if (M.inputValidator) {
T.disableInput();
var s = Promise.resolve().then(
function() {
return M.inputValidator(
a,
M.extraParams
);
}
);
M.expectRejections
? s.then(
function() {
T.enableButtons(),
T.enableInput(),
l(a);
},
function(e) {
T.enableButtons(),
T.enableInput(),
e &&
T.showValidationMessage(
e
);
}
)
: s.then(
function(e) {
T.enableButtons(),
T.enableInput(),
e
? T.showValidationMessage(
e
)
: l(
a
);
},
function(e) {
return u(e);
}
);
} else
T.getInput().checkValidity()
? l(a)
: (T.enableButtons(),
T.showValidationMessage(
M.validationMessage
));
} else l(!0);
else
r &&
V.isVisible() &&
(T.disableButtons(),
c(V.DismissReason.cancel));
}
},
i = j.popup.querySelectorAll("button"),
r = 0;
r < i.length;
r++
)
(i[r].onclick = e),
(i[r].onmouseover = e),
(i[r].onmouseout = e),
(i[r].onmousedown = e);
if (
((j.closeButton.onclick = function() {
c(V.DismissReason.close);
}),
M.toast)
)
j.popup.onclick = function() {
M.showConfirmButton ||
M.showCancelButton ||
M.showCloseButton ||
M.input ||
c(V.DismissReason.close);
};
else {
var a = !1;
(j.popup.onmousedown = function() {
j.container.onmouseup = function(e) {
(j.container.onmouseup = void 0),
e.target === j.container && (a = !0);
};
}),
(j.container.onmousedown = function() {
j.popup.onmouseup = function(e) {
(j.popup.onmouseup = void 0),
(e.target === j.popup ||
j.popup.contains(e.target)) &&
(a = !0);
};
}),
(j.container.onclick = function(e) {
a
? (a = !1)
: e.target === j.container &&
H(M.allowOutsideClick) &&
c(V.DismissReason.backdrop);
});
}
M.reverseButtons
? j.confirmButton.parentNode.insertBefore(
j.cancelButton,
j.confirmButton
)
: j.confirmButton.parentNode.insertBefore(
j.confirmButton,
j.cancelButton
);
var s = function(e, t) {
for (var n = J(M.focusCancel), o = 0; o < n.length; o++)
return (
(e += t) === n.length
? (e = 0)
: -1 === e && (e = n.length - 1),
n[e].focus()
);
j.popup.focus();
};
le.keydownHandlerAdded &&
(le.keydownTarget.removeEventListener(
"keydown",
le.keydownHandler,
{ capture: le.keydownListenerCapture }
),
(le.keydownHandlerAdded = !1)),
M.toast ||
((le.keydownHandler = function(e) {
return (function(e, t) {
if (
(t.stopKeydownPropagation &&
e.stopPropagation(),
"Enter" !== e.key || e.isComposing)
)
if ("Tab" === e.key) {
for (
var n = e.target,
o = J(t.focusCancel),
i = -1,
r = 0;
r < o.length;
r++
)
if (n === o[r]) {
i = r;
break;
}
e.shiftKey ? s(i, -1) : s(i, 1),
e.stopPropagation(),
e.preventDefault();
} else
-1 !==
[
"ArrowLeft",
"ArrowRight",
"ArrowUp",
"ArrowDown",
"Left",
"Right",
"Up",
"Down"
].indexOf(e.key)
? document.activeElement ===
j.confirmButton &&
Z(j.cancelButton)
? j.cancelButton.focus()
: document.activeElement ===
j.cancelButton &&
Z(j.confirmButton) &&
j.confirmButton.focus()
: ("Escape" !== e.key &&
"Esc" !== e.key) ||
!0 !== H(t.allowEscapeKey) ||
(e.preventDefault(),
c(V.DismissReason.esc));
else if (
e.target &&
T.getInput() &&
e.target.outerHTML ===
T.getInput().outerHTML
) {
if (
-1 !==
["textarea", "file"].indexOf(
t.input
)
)
return;
V.clickConfirm(), e.preventDefault();
}
})(e, M);
}),
(le.keydownTarget = M.keydownListenerCapture
? window
: j.popup),
(le.keydownListenerCapture =
M.keydownListenerCapture),
le.keydownTarget.addEventListener(
"keydown",
le.keydownHandler,
{ capture: le.keydownListenerCapture }
),
(le.keydownHandlerAdded = !0)),
T.enableButtons(),
T.hideLoading(),
T.resetValidationMessage(),
M.toast && (M.input || M.footer || M.showCloseButton)
? z(document.body, _["toast-column"])
: W(document.body, _["toast-column"]);
for (
var d,
p,
f = [
"input",
"file",
"range",
"select",
"radio",
"checkbox",
"textarea"
],
m = function(e) {
(e.placeholder && !M.inputPlaceholder) ||
(e.placeholder = M.inputPlaceholder);
},
h = 0;
h < f.length;
h++
) {
var g = _[f[h]],
b = U(j.content, g);
if ((d = T.getInput(f[h]))) {
for (var v in d.attributes)
if (d.attributes.hasOwnProperty(v)) {
var y = d.attributes[v].name;
"type" !== y &&
"value" !== y &&
d.removeAttribute(y);
}
for (var w in M.inputAttributes)
("range" === f[h] && "placeholder" === w) ||
d.setAttribute(w, M.inputAttributes[w]);
}
(b.className = g),
M.inputClass && z(b, M.inputClass),
F(b);
}
switch (M.input) {
case "text":
case "email":
case "password":
case "number":
case "tel":
case "url":
(d = U(j.content, _.input)),
"string" == typeof M.inputValue ||
"number" == typeof M.inputValue
? (d.value = M.inputValue)
: D(M.inputValue) ||
R(
'Unexpected type of inputValue! Expected "string", "number" or "Promise", got "'.concat(
q(M.inputValue),
'"'
)
),
m(d),
(d.type = M.input),
K(d);
break;
case "file":
m((d = U(j.content, _.file))),
(d.type = M.input),
K(d);
break;
case "range":
var C = U(j.content, _.range),
k = C.querySelector("input"),
x = C.querySelector("output");
(k.value = M.inputValue),
(k.type = M.input),
(x.value = M.inputValue),
K(C);
break;
case "select":
var A = U(j.content, _.select);
if (((A.innerHTML = ""), M.inputPlaceholder)) {
var B = document.createElement("option");
(B.innerHTML = M.inputPlaceholder),
(B.value = ""),
(B.disabled = !0),
(B.selected = !0),
A.appendChild(B);
}
p = function(e) {
e.forEach(function(e) {
var t = e[0],
n = e[1],
o = document.createElement("option");
(o.value = t),
(o.innerHTML = n),
M.inputValue.toString() ===
t.toString() && (o.selected = !0),
A.appendChild(o);
}),
K(A),
A.focus();
};
break;
case "radio":
var S = U(j.content, _.radio);
(S.innerHTML = ""),
(p = function(e) {
e.forEach(function(e) {
var t = e[0],
n = e[1],
o = document.createElement("input"),
i = document.createElement("label");
(o.type = "radio"),
(o.name = _.radio),
(o.value = t),
M.inputValue.toString() ===
t.toString() &&
(o.checked = !0);
var r = document.createElement("span");
(r.innerHTML = n),
(r.className = _.label),
i.appendChild(o),
i.appendChild(r),
S.appendChild(i);
}),
K(S);
var t = S.querySelectorAll("input");
t.length && t[0].focus();
});
break;
case "checkbox":
var P = U(j.content, _.checkbox),
E = T.getInput("checkbox");
(E.type = "checkbox"),
(E.value = 1),
(E.id = _.checkbox),
(E.checked = Boolean(M.inputValue)),
(P.querySelector("span").innerHTML =
M.inputPlaceholder),
K(P);
break;
case "textarea":
var L = U(j.content, _.textarea);
(L.value = M.inputValue), m(L), K(L);
break;
case null:
break;
default:
I(
'Unexpected type of input! Expected "text", "email", "password", "number", "tel", "select", "radio", "checkbox", "textarea", "file" or "url", got "'.concat(
M.input,
'"'
)
);
}
if ("select" === M.input || "radio" === M.input) {
var O = function(e) {
return p(
((t = e),
(n = []),
"undefined" != typeof Map && t instanceof Map
? t.forEach(function(e, t) {
n.push([t, e]);
})
: Object.keys(t).forEach(function(e) {
n.push([e, t[e]]);
}),
n)
);
var t, n;
};
D(M.inputOptions)
? (V.showLoading(),
M.inputOptions.then(function(e) {
T.hideLoading(), O(e);
}))
: "object" === q(M.inputOptions)
? O(M.inputOptions)
: I(
"Unexpected type of inputOptions! Expected object, Map or Promise, got ".concat(
q(M.inputOptions)
)
);
} else
-1 !==
[
"text",
"email",
"number",
"tel",
"textarea"
].indexOf(M.input) &&
D(M.inputValue) &&
(V.showLoading(),
F(d),
M.inputValue
.then(function(e) {
(d.value =
"number" === M.input
? parseFloat(e) || 0
: e + ""),
K(d),
d.focus(),
T.hideLoading();
})
.catch(function(e) {
I("Error in inputValue promise: " + e),
(d.value = ""),
K(d),
d.focus(),
T.hideLoading();
}));
je(M),
M.toast ||
(H(M.allowEnterKey)
? M.focusCancel && Z(j.cancelButton)
? j.cancelButton.focus()
: M.focusConfirm && Z(j.confirmButton)
? j.confirmButton.focus()
: s(-1, 1)
: document.activeElement &&
"function" ==
typeof document.activeElement.blur &&
document.activeElement.blur()),
(j.container.scrollTop = 0);
});
}
});
function Re() {
if ("undefined" != typeof window) {
"undefined" == typeof Promise &&
I(
"This package requires a Promise library, please include a shim to enable it in this browser (See: https://github.com/sweetalert2/sweetalert2/wiki/Migration-from-SweetAlert-to-SweetAlert2#1-ie-support)"
),
(Ve = this);
for (var e = arguments.length, t = new Array(e), n = 0; n < e; n++)
t[n] = arguments[n];
var o = Object.freeze(this.constructor.argsToParams(t));
Object.defineProperties(this, {
params: { value: o, writable: !1, enumerable: !0 }
});
var i = this._main(this.params);
Pe.promise.set(this, i);
}
}
(Re.prototype.then = function(e, t) {
return Pe.promise.get(this).then(e, t);
}),
(Re.prototype.catch = function(e) {
return Pe.promise.get(this).catch(e);
}),
(Re.prototype.finally = function(e) {
return Pe.promise.get(this).finally(e);
}),
r(Re.prototype, qe),
r(Re, Ae),
Object.keys(qe).forEach(function(t) {
Re[t] = function() {
var e;
if (Ve) return (e = Ve)[t].apply(e, arguments);
};
}),
(Re.DismissReason = e),
(Re.noop = function() {});
var Ie,
He,
De = fe(
((Ie = Re),
(He = (function(e) {
function t() {
return a(this, t), d(this, c(t).apply(this, arguments));
}
return (
s(t, Ie),
i(
t,
[
{
key: "_main",
value: function(e) {
return p(
c(t.prototype),
"_main",
this
).call(this, r({}, Ce, e));
}
}
],
[
{
key: "setDefaults",
value: function(t) {
if ((m(we), !t || "object" !== q(t)))
throw new TypeError(
"SweetAlert2: The argument for setDefaults() is required and has to be a object"
);
ye(t),
Object.keys(t).forEach(function(e) {
Ie.isValidParameter(e) &&
(Ce[e] = t[e]);
});
}
},
{
key: "resetDefaults",
value: function() {
m(we), (Ce = {});
}
}
]
),
t
);
})()),
"undefined" != typeof window &&
"object" === q(window._swalDefaults) &&
He.setDefaults(window._swalDefaults),
He)
);
return (De.default = De);
}),
"undefined" != typeof window &&
window.Sweetalert2 &&
((window.Sweetalert2.version = "7.33.1"),
(window.swal = window.sweetAlert = window.Swal = window.SweetAlert =
window.Sweetalert2));
|
from __future__ import print_function
import sys
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55RiverGrid(BaseGrid):
    """Regular lat/lon (Arakawa A) grid for JRA55 river-runoff forcing."""
    def __init__(self, h_grid_def, description='JRA55 river regular grid', calc_areas=True):
        """Read grid coordinates from a netCDF horizontal grid definition.

        Args:
            h_grid_def: path to a netCDF file whose 1D coordinate variables
                are named either 'longitude'/'latitude' or 'lon'/'lat'.
            description: human-readable description forwarded to BaseGrid.
            calc_areas: forwarded to BaseGrid -- presumably toggles cell-area
                computation there; confirm against base_grid.

        Exits the process with status 1 if the file cannot be opened.
        """
        self.type = 'Arakawa A'
        self.full_name = 'JRA55_river'
        try:
            with nc.Dataset(h_grid_def) as f:
                try:
                    # Prefer the long CF-style coordinate names ...
                    x_t = f.variables['longitude'][:]
                    y_t = f.variables['latitude'][:]
                except KeyError:
                    # ... fall back to the short names some files use.
                    x_t = f.variables['lon'][:]
                    y_t = f.variables['lat'][:]
        except IOError:
            print('Error opening {}'.format(h_grid_def), file=sys.stderr)
            sys.exit(1)
        super(Jra55RiverGrid, self).__init__(x_t=x_t, y_t=y_t,
                                             description=description,
                                             calc_areas=calc_areas)
    def fix_pole_holes(self):
        """Snap polar cell-corner latitudes to exactly +/-90 degrees.

        Assumes self.clat_t (set up by BaseGrid) is a (corner, y, x) array
        with corners 2 and 3 on a cell's northern edge and corners 0 and 1
        on its southern edge -- TODO confirm ordering against BaseGrid.
        """
        self.clat_t[2, -1, :] = 90.0
        self.clat_t[3, -1, :] = 90.0
        # Do South pole as well
        self.clat_t[0, 0, :] = -90.0
        self.clat_t[1, 0, :] = -90.0
|
import datetime
# This file post-processes classification results returned from the database:
# a <class 'list'> whose elements are dicts.
# Sample rows captured from one run. Each record carries the ML method,
# feature method, per-class total/correct counts, a timestamp and a run tag.
# Note: 'total' is an aggregate row and 'earth' has no slot in the `name`
# tuple used by the parsers below, so both are skipped during parsing.
test_list =[{'mlmethod': 'svc', 'total': 52, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 12, 53), 'unitag': '1493133167337', 'id': 1, 'correct': 49, 'classify': 'glass'},
            {'mlmethod': 'svc', 'total': 119, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 3), 'unitag': '1493133167337', 'id': 2, 'correct': 105, 'classify': 'car'},
            {'mlmethod': 'svc', 'total': 44, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 7), 'unitag': '1493133167337', 'id': 3, 'correct': 39, 'classify': 'gun'},
            {'mlmethod': 'svc', 'total': 63, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 13), 'unitag': '1493133167337', 'id': 4, 'correct': 56, 'classify': 'flowers'},
            {'mlmethod': 'svc', 'total': 131, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 25), 'unitag': '1493133167337', 'id': 5, 'correct': 123, 'classify': 'worldcup'},
            {'mlmethod': 'svc', 'total': 78, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 32), 'unitag': '1493133167337', 'id': 6, 'correct': 68, 'classify': 'fruits'},
            {'mlmethod': 'svc', 'total': 59, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 37), 'unitag': '1493133167337', 'id': 7, 'correct': 57, 'classify': 'city'},
            {'mlmethod': 'svc', 'total': 49, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 41), 'unitag': '1493133167337', 'id': 8, 'correct': 48, 'classify': 'dog'},
            {'mlmethod': 'svc', 'total': 54, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 46), 'unitag': '1493133167337', 'id': 9, 'correct': 46, 'classify': 'fireworks'},
            {'mlmethod': 'svc', 'total': 24, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 48), 'unitag': '1493133167337', 'id': 10, 'correct': 24, 'classify': 'earth'},
            {'mlmethod': 'svc', 'total': 78, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 54), 'unitag': '1493133167337', 'id': 11, 'correct': 73, 'classify': 'sky'},
            {'mlmethod': 'svc', 'total': 44, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 13, 59), 'unitag': '1493133167337', 'id': 12, 'correct': 40, 'classify': 'gold'},
            {'mlmethod': 'svc', 'total': 102, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 14, 6), 'unitag': '1493133167337', 'id': 13, 'correct': 74, 'classify': 'plane'},
            {'mlmethod': 'svc', 'total': 897, 'feamethod': 'sift', 'created': datetime.datetime(2017, 4, 25, 23, 14, 6), 'unitag': '1493133167337', 'id': 14, 'correct': 802, 'classify': 'total'}]
# Parse the per-class results produced with the different SVM kernel functions.
def parse_ml_result(result_list):
svc_list=[]
rbf_list=[]
poly_list=[]
liner_list=[]
name = ('car', 'city', 'dog', 'fireworks', 'flowers',
'fruits', 'glass', 'gold', 'gun', 'plane', 'sky', 'worldcup')
llabel = ('svc', 'rbf_svc', 'poly_svc', 'lin_svc')
# 这里的每一个元素都是一条数据
for r_dict in result_list:
if r_dict['mlmethod']==llabel[0]:
for item in name:
if item==r_dict['classify']:
svc_list.insert(name.index(item),r_dict['correct'])
break
else:
continue
elif r_dict['mlmethod']==llabel[1]:
for item in name:
if item==r_dict['classify']:
rbf_list.insert(name.index(item),r_dict['correct'])
break
else:
continue
elif r_dict['mlmethod'] == llabel[2]:
for item in name:
if item == r_dict['classify']:
poly_list.insert(name.index(item), r_dict['correct'])
else:
continue
elif r_dict['mlmethod'] == llabel[3]:
for item in name:
if item == r_dict['classify']:
liner_list.insert(name.index(item), r_dict['correct'])
else:
continue
return svc_list, rbf_list, poly_list, liner_list, name, llabel
# Smoke-run: parse the kernel-comparison rows loaded above into per-kernel
# lists of correct counts, then dump each list to stdout for inspection.
# NOTE(review): these names become module-level globals; parse_fea_result
# below appears to (mis)use some of them — verify before refactoring.
svc_list, rbf_list, poly_list, liner_list, _name, _llabel=parse_ml_result(test_list)
print(svc_list)
print(rbf_list)
print(poly_list)
print(liner_list)
# This function parses the result rows produced with different feature
# extraction methods.
def parse_fea_result(result_list):
    """Group per-category 'correct' counts by feature-extraction method.

    Bug fix: the original compared r_dict['mlmethod'] against the feature
    labels and inserted into / returned the *module-level*
    svc_list/rbf_list/poly_list/liner_list globals instead of the local
    sift/surf/orb/brisk lists it had just created — so it never produced
    any feature-method data. This version keys on 'feamethod' and fills
    and returns the local lists.

    Each element of result_list is a dict with at least the keys
    'feamethod', 'classify' and 'correct'. Unrecognised feature labels or
    categories (e.g. the aggregate 'total' row) are skipped silently.

    Returns (sift_list, surf_list, orb_list, brisk_list, name, llabel).
    """
    name = ('car', 'city', 'dog', 'fireworks', 'flowers',
            'fruits', 'glass', 'gold', 'gun', 'plane', 'sky', 'worldcup')
    llabel = ('sift', 'surf', 'orb', 'brisk')
    sift_list = []
    surf_list = []
    orb_list = []
    brisk_list = []
    # Dispatch table: feature-method label -> destination list.
    target_by_method = {
        llabel[0]: sift_list,
        llabel[1]: surf_list,
        llabel[2]: orb_list,
        llabel[3]: brisk_list,
    }
    # Each element here is one result row.
    for r_dict in result_list:
        target = target_by_method.get(r_dict.get('feamethod'))
        if target is None:
            continue
        classify = r_dict['classify']
        if classify in name:
            # insert() at the category's canonical index keeps counts in
            # `name` order when rows arrive in that order.
            target.insert(name.index(classify), r_dict['correct'])
    return sift_list, surf_list, orb_list, brisk_list, name, llabel
# Parses the summary table: overall accuracy for every
# (feature method, ML kernel) combination.
def parse_summary(summary_list):
    """Extract overall accuracy percentages from summary rows.

    Only rows whose 'classify' is 'total' are used. For each such row the
    accuracy is int(correct / 586 * 100) — 586 being the size of the full
    test set — inserted into the list for the row's feature method
    ('feamethod') at the index of its ML method in `llabel`, so each list
    ends up ordered (svc, rbf, poly, lin).

    This rewrite collapses the original 16 near-identical branches into a
    single table-driven loop and names the repeated magic number 586;
    behavior is unchanged (unrecognised methods are skipped silently).

    Returns (sift_list, surf_list, orb_list, brisk_list, name, llabel).
    """
    TOTAL_SAMPLES = 586  # total number of test samples; accuracies are % of this
    name = ('sift', 'surf', 'orb', 'brisk')
    llabel = ('svc', 'rbf', 'poly', 'lin')
    sift_list = []
    surf_list = []
    orb_list = []
    brisk_list = []
    # Dispatch table: feature-method label -> destination list.
    target_by_fea = {
        name[0]: sift_list,
        name[1]: surf_list,
        name[2]: orb_list,
        name[3]: brisk_list,
    }
    # Each element here is one summary row.
    for r_dict in summary_list:
        if r_dict['classify'] != 'total':
            continue
        if r_dict['mlmethod'] not in llabel:
            continue
        target = target_by_fea.get(r_dict['feamethod'])
        if target is None:
            continue
        # Position within the list mirrors the ML method's index in llabel.
        pos = llabel.index(r_dict['mlmethod'])
        target.insert(pos, int(r_dict['correct'] / TOTAL_SAMPLES * 100))
    return sift_list, surf_list, orb_list, brisk_list, name, llabel
|
/*JSTZ.min.js*/
// jstz (jsTimezoneDetect), vendored minified copy — left byte-identical;
// comments only. jstz() compares -Date.getTimezoneOffset() sampled in
// January vs June 2010 to derive the UTC offset and whether DST applies,
// builds a key of the form "offset[,dst][,s]" and resolves it through
// jstz.olson.timezones; ambiguous names are narrowed by probing each
// candidate's recorded DST start date (jstz.olson.dst_start_dates).
// NOTE(review): hemisphere handling for the "-120,1" case presumably
// targets South Atlantic zones — verify against upstream before editing.
var jstz=function(){function f(a){a=-a.getTimezoneOffset();return null!==a?a:0}function h(){return f(new Date(2010,0,1,0,0,0,0))}var b={timezone_name:"",uses_dst:"",utc_offset:0,utc_name:"",hemisphere:""};return function(){var a=h(),e=f(new Date(2010,5,1,0,0,0,0)),d=a-e;0>d&&(b.utc_offset=a);0<d&&(b.utc_offset=e);b.uses_dst=0==d?!1:!0;b.utc_offset=a;var c,a=""+b.utc_offset;jstz.olson.timezones[a]&&(c=jstz.olson.timezones[a]);a+=b.uses_dst?",1":",0";jstz.olson.timezones[a]&&(c=jstz.olson.timezones[a]);
"-120,1"==a&&(e=f(new Date),d=h(),e!=d&&(b.timezone_name=jstz.olson.timezones["-60,0"],b.hemisphere="south"));a+="south"==b.hemisphere?"s":"";jstz.olson.timezones[a]&&(c=jstz.olson.timezones[a]);b.timezone_name=c[0];"s"===c[1]&&(b.hemisphere="south");"n"===c[1]&&(b.hemisphere="north");if(jstz.olson.is_ambiguous(b.timezone_name))a:for(c=jstz.olson.ambiguity_list[b.timezone_name],a=c.length,e=c[0],d=0;d<a;d++){var e=c[d],g=jstz.olson.dst_start_dates[e],k=5<g.getMonth()?f(new Date(2010,5,1,0,0,0,0)):
h(),g=f(g);if(0!==k-g){b.timezone_name=e;break a}}b.utc_name="";0==parseInt(b.utc_offset%60)&&(c=parseInt(b.utc_offset/60),b.dst&&(c+=1),a=0>b.utc_offset?"+":"-",b.utc_name="Etc/GMT"+a+Math.abs(c));return b}()};
// Olson lookup tables: key "offset-in-minutes[,dst-flag][,s]" ->
// [IANA timezone name, hemisphere letter]; plus the ambiguity list and
// the 2010/2011 DST transition dates used to disambiguate it.
jstz.olson={timezones:{"-720":["Etc/GMT+12",""],"-660,0":["Pacific/Pago_Pago","n"],"-600,1":["America/Adak","n"],"-660,1,s":["Pacific/Apia","s"],"-600,0":["Pacific/Honolulu","n"],"-570":["Pacific/Marquesas","n"],"-540,0":["Pacific/Gambier",""],"-540,1":["America/Anchorage","n"],"-480,1":["America/Los_Angeles","n"],"-480,0":["Pacific/Pitcairn","n"],"-420,0":["America/Phoenix","n"],"-420,1":["America/Denver","n"],"-360,0":["America/Guatemala","n"],"-360,1":["America/Chicago","n"],"-360,1,s":["Pacific/Easter",
"s"],"-300,0":["America/Bogota","n"],"-300,1":["America/New_York","n"],"-270":["America/Caracas","n"],"-240,1":["America/Halifax","n"],"-240,0":["America/Santo_Domingo",""],"-240,1,s":["America/Asuncion","s"],"-210":["America/St_Johns","n"],"-180,1":["America/Godthab","n"],"-180,0":["America/Argentina/Buenos_Aires","s"],"-180,1,s":["America/Montevideo","s"],"-120,0":["America/Noronha","s"],"-120,1":["Atlantic/South_Georgia","s"],"-60,1":["Atlantic/Azores",""],"-60,0":["Atlantic/Cape_Verde","s"],"0,0":["Etc/UTC",
""],"0,1":["Europe/London","n"],"60,0":["Africa/Lagos","n"],"60,1":["Europe/Berlin","n"],"60,1,s":["Africa/Windhoek","s"],"120,1":["Asia/Beirut","n"],"120,0":["Africa/Johannesburg","n"],"180,1":["Europe/Moscow","n"],"180,0":["Asia/Baghdad","s"],"210,1":["Asia/Tehran","n"],"240,0":["Asia/Dubai","n"],"240,1":["Asia/Yerevan","n"],"240,1,s":["Etc/UTC+4","s"],270:["Asia/Kabul","n"],"300,1":["Asia/Yekaterinburg","n"],"300,0":["Asia/Karachi","n"],330:["Asia/Kolkata","n"],345:["Asia/Kathmandu","n"],"360,0":["Asia/Dhaka",
"n"],"360,1":["Asia/Omsk","n"],"390,0":["Asia/Rangoon","n"],"420,1":["Asia/Krasnoyarsk","n"],"420,0":["Asia/Jakarta","n"],"480,0":["Asia/Shanghai","n"],"480,1":["Asia/Irkutsk","n"],"480,1,s":["Australia/Perth","s"],"525,0":["Australia/Eucla","n"],"525,1":["Australia/Eucla","s"],"540,1":["Asia/Yakutsk","n"],"540,0":["Asia/Tokyo","n"],"570,0":["Australia/Darwin","n"],"570,1":["Australia/Adelaide","s"],"600,0":["Australia/Brisbane","n"],"600,1":["Asia/Vladivostok","n"],"600,1,s":["Australia/Sydney",
"s"],630:["Australia/Lord_Howe","s"],"660,1":["Asia/Kamchatka","n"],"660,0":["Pacific/Noumea","n"],690:["Pacific/Norfolk","n"],"720,1":["Etc/GMT+12","n"],"720,1,s":["Pacific/Auckland","s"],"720,0":["Pacific/Tarawa","n"],765:["Pacific/Chatham","s"],"780,0":["Pacific/Tongatapu","n"],"780,1,s":["Pacific/Pago_Pago","s"],"840,0":["Pacific/Kiritimati","n"]},is_ambiguous:function(f){return"undefined"!==typeof this.ambiguity_list[f]},ambiguity_list:{"America/Denver":["America/Denver","America/Mazatlan"],
"America/Chicago":["America/Chicago","America/Mexico_City"],"America/Asuncion":["Atlantic/Stanley","America/Asuncion","America/Santiago","America/Campo_Grande"],"America/Montevideo":["America/Montevideo","America/Sao_Paulo"],"Asia/Beirut":"Asia/Gaza Asia/Beirut Europe/Minsk Europe/Helsinki Europe/Istanbul Asia/Damascus Asia/Jerusalem Africa/Cairo".split(" "),"Asia/Yerevan":["Asia/Yerevan","Asia/Baku"],"Pacific/Auckland":["Pacific/Auckland","Pacific/Fiji"],"America/Los_Angeles":["America/Los_Angeles",
"America/Santa_Isabel"],"America/New_York":["America/Havana","America/New_York"],"America/Halifax":["America/Goose_Bay","America/Halifax"],"America/Godthab":["America/Miquelon","America/Godthab"]},dst_start_dates:{"America/Denver":new Date(2011,2,13,3,0,0,0),"America/Mazatlan":new Date(2011,3,3,3,0,0,0),"America/Chicago":new Date(2011,2,13,3,0,0,0),"America/Mexico_City":new Date(2011,3,3,3,0,0,0),"Atlantic/Stanley":new Date(2011,8,4,7,0,0,0),"America/Asuncion":new Date(2011,9,2,3,0,0,0),"America/Santiago":new Date(2011,
9,9,3,0,0,0),"America/Campo_Grande":new Date(2011,9,16,5,0,0,0),"America/Montevideo":new Date(2011,9,2,3,0,0,0),"America/Sao_Paulo":new Date(2011,9,16,5,0,0,0),"America/Los_Angeles":new Date(2011,2,13,8,0,0,0),"America/Santa_Isabel":new Date(2011,3,5,8,0,0,0),"America/Havana":new Date(2011,2,13,2,0,0,0),"America/New_York":new Date(2011,2,13,7,0,0,0),"Asia/Gaza":new Date(2011,2,26,23,0,0,0),"Asia/Beirut":new Date(2011,2,27,1,0,0,0),"Europe/Minsk":new Date(2011,2,27,3,0,0,0),"Europe/Helsinki":new Date(2011,
2,27,4,0,0,0),"Europe/Istanbul":new Date(2011,2,28,5,0,0,0),"Asia/Damascus":new Date(2011,3,1,2,0,0,0),"Asia/Jerusalem":new Date(2011,3,1,6,0,0,0),"Africa/Cairo":new Date(2010,3,30,4,0,0,0),"Asia/Yerevan":new Date(2011,2,27,4,0,0,0),"Asia/Baku":new Date(2011,2,27,8,0,0,0),"Pacific/Auckland":new Date(2011,8,26,7,0,0,0),"Pacific/Fiji":new Date(2010,11,29,23,0,0,0),"America/Halifax":new Date(2011,2,13,6,0,0,0),"America/Goose_Bay":new Date(2011,2,13,2,1,0,0),"America/Miquelon":new Date(2011,2,13,5,0,
0,0),"America/Godthab":new Date(2011,2,27,1,0,0,0)}};
/*Moment.min.js*/
(function(q){function B(a,b,c){switch(arguments.length){case 2:return null!=a?a:b;case 3:return null!=a?a:null!=b?b:c;default:throw Error("Implement me");}}function G(){return{empty:!1,unusedTokens:[],unusedInput:[],overflow:-2,charsLeftOver:0,nullInput:!1,invalidMonth:null,invalidFormat:!1,userInvalidated:!1,iso:!1}}function Y(a){!1===d.suppressDeprecationWarnings&&"undefined"!==typeof console&&console.warn&&console.warn("Deprecation warning: "+a)}function m(a,b){var c=!0;return w(function(){c&&
(Y(a),c=!1);return b.apply(this,arguments)},b)}function Z(a,b){return function(c){return l(a.call(this,c),b)}}function Fa(a,b){return function(c){return this.localeData().ordinal(a.call(this,c),b)}}function aa(){}function C(a,b){!1!==b&&ba(a);ca(this,a);this._d=new Date(+a._d);!1===H&&(H=!0,d.updateOffset(this),H=!1)}function I(a){a=da(a);var b=a.year||0,c=a.quarter||0,e=a.month||0,g=a.week||0,f=a.day||0;this._milliseconds=+(a.millisecond||0)+1E3*(a.second||0)+6E4*(a.minute||0)+36E5*(a.hour||0);this._days=
+f+7*g;this._months=+e+3*c+12*b;this._data={};this._locale=d.localeData();this._bubble()}function w(a,b){for(var c in b)x.call(b,c)&&(a[c]=b[c]);x.call(b,"toString")&&(a.toString=b.toString);x.call(b,"valueOf")&&(a.valueOf=b.valueOf);return a}function ca(a,b){var c,e,d;"undefined"!==typeof b._isAMomentObject&&(a._isAMomentObject=b._isAMomentObject);"undefined"!==typeof b._i&&(a._i=b._i);"undefined"!==typeof b._f&&(a._f=b._f);"undefined"!==typeof b._l&&(a._l=b._l);"undefined"!==typeof b._strict&&(a._strict=
b._strict);"undefined"!==typeof b._tzm&&(a._tzm=b._tzm);"undefined"!==typeof b._isUTC&&(a._isUTC=b._isUTC);"undefined"!==typeof b._offset&&(a._offset=b._offset);"undefined"!==typeof b._pf&&(a._pf=b._pf);"undefined"!==typeof b._locale&&(a._locale=b._locale);if(0<D.length)for(c in D)e=D[c],d=b[e],"undefined"!==typeof d&&(a[e]=d);return a}function r(a){return 0>a?Math.ceil(a):Math.floor(a)}function l(a,b,c){for(var e=""+Math.abs(a);e.length<b;)e="0"+e;return(0<=a?c?"+":"":"-")+e}function ea(a,b){var c=
{milliseconds:0,months:0};c.months=b.month()-a.month()+12*(b.year()-a.year());a.clone().add(c.months,"M").isAfter(b)&&--c.months;c.milliseconds=+b-+a.clone().add(c.months,"M");return c}function Ga(a,b){var c;b=J(b,a);a.isBefore(b)?c=ea(a,b):(c=ea(b,a),c.milliseconds=-c.milliseconds,c.months=-c.months);return c}function fa(a,b){return function(c,e){var g;null===e||isNaN(+e)||(ga[b]||(Y("moment()."+b+"(period, number) is deprecated. Please use moment()."+b+"(number, period)."),ga[b]=!0),g=c,c=e,e=g);
g=d.duration("string"===typeof c?+c:c,e);ha(this,g,a);return this}}function ha(a,b,c,e){var g=b._milliseconds,f=b._days;b=b._months;e=null==e?!0:e;g&&a._d.setTime(+a._d+g*c);f&&ia(a,"Date",K(a,"Date")+f*c);b&&ja(a,K(a,"Month")+b*c);e&&d.updateOffset(a,f||b)}function E(a){return"[object Array]"===Object.prototype.toString.call(a)}function L(a){return"[object Date]"===Object.prototype.toString.call(a)||a instanceof Date}function ka(a,b,c){var e=Math.min(a.length,b.length),d=Math.abs(a.length-b.length),
f=0,h;for(h=0;h<e;h++)(c&&a[h]!==b[h]||!c&&k(a[h])!==k(b[h]))&&f++;return f+d}function n(a){if(a){var b=a.toLowerCase().replace(/(.)s$/,"$1");a=Ha[a]||Ia[b]||b}return a}function da(a){var b={},c,e;for(e in a)x.call(a,e)&&(c=n(e))&&(b[c]=a[e]);return b}function Ja(a){var b,c;if(0===a.indexOf("week"))b=7,c="day";else if(0===a.indexOf("month"))b=12,c="month";else return;d[a]=function(e,g){var f,h,k=d._locale[a],l=[];"number"===typeof e&&(g=e,e=q);h=function(a){a=d().utc().set(c,a);return k.call(d._locale,
a,e||"")};if(null!=g)return h(g);for(f=0;f<b;f++)l.push(h(f));return l}}function k(a){a=+a;var b=0;0!==a&&isFinite(a)&&(b=0<=a?Math.floor(a):Math.ceil(a));return b}function M(a,b){return(new Date(Date.UTC(a,b+1,0))).getUTCDate()}function la(a,b,c){return y(d([a,11,31+b-c]),b,c).week}function N(a){return 0===a%4&&0!==a%100||0===a%400}function ba(a){var b;a._a&&-2===a._pf.overflow&&(b=0>a._a[1]||11<a._a[1]?1:1>a._a[2]||a._a[2]>M(a._a[0],a._a[1])?2:0>a._a[3]||24<a._a[3]||24===a._a[3]&&(0!==a._a[4]||
0!==a._a[5]||0!==a._a[6])?3:0>a._a[4]||59<a._a[4]?4:0>a._a[5]||59<a._a[5]?5:0>a._a[6]||999<a._a[6]?6:-1,a._pf._overflowDayOfYear&&(0>b||2<b)&&(b=2),a._pf.overflow=b)}function ma(a){null==a._isValid&&(a._isValid=!isNaN(a._d.getTime())&&0>a._pf.overflow&&!a._pf.empty&&!a._pf.invalidMonth&&!a._pf.nullInput&&!a._pf.invalidFormat&&!a._pf.userInvalidated,a._strict&&(a._isValid=a._isValid&&0===a._pf.charsLeftOver&&0===a._pf.unusedTokens.length&&a._pf.bigHour===q));return a._isValid}function na(a){return a?
a.toLowerCase().replace("_","-"):a}function oa(a){var b=null;if(!z[a]&&pa)try{b=d.locale(),require("./locale/"+a),d.locale(b)}catch(c){}return z[a]}function J(a,b){var c,e;return b._isUTC?(c=b.clone(),e=(d.isMoment(a)||L(a)?+a:+d(a))-+c,c._d.setTime(+c._d+e),d.updateOffset(c,!1),c):d(a).local()}function Ka(a){return a.match(/\[[\s\S]/)?a.replace(/^\[|\]$/g,""):a.replace(/\\/g,"")}function La(a){var b=a.match(qa),c,e;c=0;for(e=b.length;c<e;c++)b[c]=t[b[c]]?t[b[c]]:Ka(b[c]);return function(d){var f=
"";for(c=0;c<e;c++)f+=b[c]instanceof Function?b[c].call(d,a):b[c];return f}}function O(a,b){if(!a.isValid())return a.localeData().invalidDate();b=ra(b,a.localeData());P[b]||(P[b]=La(b));return P[b](a)}function ra(a,b){function c(a){return b.longDateFormat(a)||a}var e=5;for(F.lastIndex=0;0<=e&&F.test(a);)a=a.replace(F,c),F.lastIndex=0,--e;return a}function Ma(a,b){var c=b._strict;switch(a){case "Q":return sa;case "DDDD":return ta;case "YYYY":case "GGGG":case "gggg":return c?Na:Oa;case "Y":case "G":case "g":return Pa;
case "YYYYYY":case "YYYYY":case "GGGGG":case "ggggg":return c?Qa:Ra;case "S":if(c)return sa;case "SS":if(c)return ua;case "SSS":if(c)return ta;case "DDD":return Sa;case "MMM":case "MMMM":case "dd":case "ddd":case "dddd":return Ta;case "a":case "A":return b._locale._meridiemParse;case "x":return Ua;case "X":return Va;case "Z":case "ZZ":return Q;case "T":return Wa;case "SSSS":return Xa;case "MM":case "DD":case "YY":case "GG":case "gg":case "HH":case "hh":case "mm":case "ss":case "ww":case "WW":return c?
ua:va;case "M":case "D":case "d":case "H":case "h":case "m":case "s":case "w":case "W":case "e":case "E":return va;case "Do":return c?b._locale._ordinalParse:b._locale._ordinalParseLenient;default:var c=RegExp,e;e=Ya(a.replace("\\","")).replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&");return new c(e)}}function R(a){a=(a||"").match(Q)||[];a=((a[a.length-1]||[])+"").match(Za)||["-",0,0];var b=+(60*a[1])+k(a[2]);return"+"===a[0]?b:-b}function S(a){var b,c,e=[],g;if(!a._d){g=new Date;g=a._useUTC?[g.getUTCFullYear(),
g.getUTCMonth(),g.getUTCDate()]:[g.getFullYear(),g.getMonth(),g.getDate()];if(a._w&&null==a._a[2]&&null==a._a[1]){var f,h,k;f=a._w;null!=f.GG||null!=f.W||null!=f.E?(b=1,k=4,c=B(f.GG,a._a[0],y(d(),1,4).year),h=B(f.W,1),f=B(f.E,1)):(b=a._locale._week.dow,k=a._locale._week.doy,c=B(f.gg,a._a[0],y(d(),b,k).year),h=B(f.w,1),null!=f.d?(f=f.d,f<b&&++h):f=null!=f.e?f.e+b:b);var l=T(c,0,1).getUTCDay(),l=0===l?7:l;k=7*(h-1)+((null!=f?f:b)-b)+(b-l+(l>k?7:0)-(l<b?7:0))+1;b=0<k?c:c-1;c=0<k?k:(N(c-1)?366:365)+k;
a._a[0]=b;a._dayOfYear=c}a._dayOfYear&&(c=B(a._a[0],g[0]),a._dayOfYear>(N(c)?366:365)&&(a._pf._overflowDayOfYear=!0),c=T(c,0,a._dayOfYear),a._a[1]=c.getUTCMonth(),a._a[2]=c.getUTCDate());for(c=0;3>c&&null==a._a[c];++c)a._a[c]=e[c]=g[c];for(;7>c;c++)a._a[c]=e[c]=null==a._a[c]?2===c?1:0:a._a[c];24===a._a[3]&&0===a._a[4]&&0===a._a[5]&&0===a._a[6]&&(a._nextDay=!0,a._a[3]=0);a._d=(a._useUTC?T:$a).apply(null,e);null!=a._tzm&&a._d.setUTCMinutes(a._d.getUTCMinutes()-a._tzm);a._nextDay&&(a._a[3]=24)}}function ab(a){var b;
a._d||(b=da(a._i),a._a=[b.year,b.month,b.day||b.date,b.hour,b.minute,b.second,b.millisecond],S(a))}function U(a){if(a._f===d.ISO_8601)wa(a);else{a._a=[];a._pf.empty=!0;var b=""+a._i,c,e,g,f,h,l=b.length,p=0;g=ra(a._f,a._locale).match(qa)||[];for(c=0;c<g.length;c++){f=g[c];if(e=(b.match(Ma(f,a))||[])[0])h=b.substr(0,b.indexOf(e)),0<h.length&&a._pf.unusedInput.push(h),b=b.slice(b.indexOf(e)+e.length),p+=e.length;if(t[f]){e?a._pf.empty=!1:a._pf.unusedTokens.push(f);h=a;var n=void 0,m=h._a;switch(f){case "Q":null!=
e&&(m[1]=3*(k(e)-1));break;case "M":case "MM":null!=e&&(m[1]=k(e)-1);break;case "MMM":case "MMMM":n=h._locale.monthsParse(e,f,h._strict);null!=n?m[1]=n:h._pf.invalidMonth=e;break;case "D":case "DD":null!=e&&(m[2]=k(e));break;case "Do":null!=e&&(m[2]=k(parseInt(e.match(/\d{1,2}/)[0],10)));break;case "DDD":case "DDDD":null!=e&&(h._dayOfYear=k(e));break;case "YY":m[0]=d.parseTwoDigitYear(e);break;case "YYYY":case "YYYYY":case "YYYYYY":m[0]=k(e);break;case "a":case "A":h._meridiem=e;break;case "h":case "hh":h._pf.bigHour=
!0;case "H":case "HH":m[3]=k(e);break;case "m":case "mm":m[4]=k(e);break;case "s":case "ss":m[5]=k(e);break;case "S":case "SS":case "SSS":case "SSSS":m[6]=k(1E3*("0."+e));break;case "x":h._d=new Date(k(e));break;case "X":h._d=new Date(1E3*parseFloat(e));break;case "Z":case "ZZ":h._useUTC=!0;h._tzm=R(e);break;case "dd":case "ddd":case "dddd":n=h._locale.weekdaysParse(e);null!=n?(h._w=h._w||{},h._w.d=n):h._pf.invalidWeekday=e;break;case "w":case "ww":case "W":case "WW":case "d":case "e":case "E":f=
f.substr(0,1);case "gggg":case "GGGG":case "GGGGG":f=f.substr(0,2);e&&(h._w=h._w||{},h._w[f]=k(e));break;case "gg":case "GG":h._w=h._w||{},h._w[f]=d.parseTwoDigitYear(e)}}else a._strict&&!e&&a._pf.unusedTokens.push(f)}a._pf.charsLeftOver=l-p;0<b.length&&a._pf.unusedInput.push(b);!0===a._pf.bigHour&&12>=a._a[3]&&(a._pf.bigHour=q);b=a._a;g=a._locale;c=a._a[3];l=a._meridiem;null!=l&&(null!=g.meridiemHour?c=g.meridiemHour(c,l):null!=g.isPM&&((g=g.isPM(l))&&12>c&&(c+=12),g||12!==c||(c=0)));b[3]=c;S(a);
ba(a)}}function Ya(a){return a.replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g,function(a,c,e,d,f){return c||e||d||f})}function wa(a){var b,c,e=a._i,d=bb.exec(e);if(d){a._pf.iso=!0;b=0;for(c=V.length;b<c;b++)if(V[b][1].exec(e)){a._f=V[b][0]+(d[6]||" ");break}b=0;for(c=W.length;b<c;b++)if(W[b][1].exec(e)){a._f+=W[b][0];break}e.match(Q)&&(a._f+="Z");U(a)}else a._isValid=!1}function cb(a){wa(a);!1===a._isValid&&(delete a._isValid,d.createFromInputFallback(a))}function db(a,b){var c=[],e;for(e=0;e<a.length;++e)c.push(b(a[e],
e));return c}function eb(a){var b=a._i,c;b===q?a._d=new Date:L(b)?a._d=new Date(+b):null!==(c=fb.exec(b))?a._d=new Date(+c[1]):"string"===typeof b?cb(a):E(b)?(a._a=db(b.slice(0),function(a){return parseInt(a,10)}),S(a)):"object"===typeof b?ab(a):"number"===typeof b?a._d=new Date(b):d.createFromInputFallback(a)}function $a(a,b,c,e,d,f,h){b=new Date(a,b,c,e,d,f,h);1970>a&&b.setFullYear(a);return b}function T(a){var b=new Date(Date.UTC.apply(null,arguments));1970>a&&b.setUTCFullYear(a);return b}function gb(a,
b,c,e,d){return d.relativeTime(b||1,!!c,a,e)}function y(a,b,c){b=c-b;c-=a.day();c>b&&(c-=7);c<b-7&&(c+=7);a=d(a).add(c,"d");return{week:Math.ceil(a.dayOfYear()/7),year:a.year()}}function xa(a){var b=a._i,c=a._f;a._locale=a._locale||d.localeData(a._l);if(null===b||c===q&&""===b)return d.invalid({nullInput:!0});"string"===typeof b&&(a._i=b=a._locale.preparse(b));if(d.isMoment(b))return new C(b,!0);if(c)if(E(c)){var e,g,f;if(0===a._f.length)a._pf.invalidFormat=!0,a._d=new Date(NaN);else{for(b=0;b<a._f.length;b++)if(c=
0,e=ca({},a),null!=a._useUTC&&(e._useUTC=a._useUTC),e._pf=G(),e._f=a._f[b],U(e),ma(e)&&(c+=e._pf.charsLeftOver,c+=10*e._pf.unusedTokens.length,e._pf.score=c,null==f||c<f))f=c,g=e;w(a,g||e)}}else U(a);else eb(a);a=new C(a);a._nextDay&&(a.add(1,"d"),a._nextDay=q);return a}function ya(a,b){var c,e;1===b.length&&E(b[0])&&(b=b[0]);if(!b.length)return d();c=b[0];for(e=1;e<b.length;++e)b[e][a](c)&&(c=b[e]);return c}function ja(a,b){var c;if("string"===typeof b&&(b=a.localeData().monthsParse(b),"number"!==
typeof b))return a;c=Math.min(a.date(),M(a.year(),b));a._d["set"+(a._isUTC?"UTC":"")+"Month"](b,c);return a}function K(a,b){return a._d["get"+(a._isUTC?"UTC":"")+b]()}function ia(a,b,c){return"Month"===b?ja(a,c):a._d["set"+(a._isUTC?"UTC":"")+b](c)}function u(a,b){return function(c){return null!=c?(ia(this,a,c),d.updateOffset(this,b),this):K(this,a)}}function hb(a){d.duration.fn[a]=function(){return this._data[a]}}function za(a){"undefined"===typeof ender&&(Aa=X.moment,X.moment=a?m("Accessing Moment through the global scope is deprecated, and will be removed in an upcoming release.",
d):d)}for(var d,X="undefined"===typeof global||"undefined"!==typeof window&&window!==global.window?this:global,Aa,A=Math.round,x=Object.prototype.hasOwnProperty,p,z={},D=[],pa="undefined"!==typeof module&&module&&module.exports,fb=/^\/?Date\((\-?\d+)/i,ib=/(\-)?(?:(\d*)\.)?(\d+)\:(\d+)(?:\:(\d+)\.?(\d{3})?)?/,jb=/^(-)?P(?:(?:([0-9,.]*)Y)?(?:([0-9,.]*)M)?(?:([0-9,.]*)D)?(?:T(?:([0-9,.]*)H)?(?:([0-9,.]*)M)?(?:([0-9,.]*)S)?)?|([0-9,.]*)W)$/,qa=/(\[[^\[]*\])|(\\)?(Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Q|YYYYYY|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|S{1,4}|x|X|zz?|ZZ?|.)/g,
F=/(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g,va=/\d\d?/,Sa=/\d{1,3}/,Oa=/\d{1,4}/,Ra=/[+\-]?\d{1,6}/,Xa=/\d+/,Ta=/[0-9]*['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+|[\u0600-\u06FF\/]+(\s*?[\u0600-\u06FF]+){1,2}/i,Q=/Z|[\+\-]\d\d:?\d\d/gi,Wa=/T/i,Ua=/[\+\-]?\d+/,Va=/[\+\-]?\d+(\.\d{1,3})?/,sa=/\d/,ua=/\d\d/,ta=/\d{3}/,Na=/\d{4}/,Qa=/[+-]?\d{6}/,Pa=/[+-]?\d+/,bb=/^\s*(?:[+-]\d{6}|\d{4})-(?:(\d\d-\d\d)|(W\d\d$)|(W\d\d-\d)|(\d\d\d))((T| )(\d\d(:\d\d(:\d\d(\.\d+)?)?)?)?([\+\-]\d\d(?::?\d\d)?|\s*Z)?)?$/,
V=[["YYYYYY-MM-DD",/[+-]\d{6}-\d{2}-\d{2}/],["YYYY-MM-DD",/\d{4}-\d{2}-\d{2}/],["GGGG-[W]WW-E",/\d{4}-W\d{2}-\d/],["GGGG-[W]WW",/\d{4}-W\d{2}/],["YYYY-DDD",/\d{4}-\d{3}/]],W=[["HH:mm:ss.SSSS",/(T| )\d\d:\d\d:\d\d\.\d+/],["HH:mm:ss",/(T| )\d\d:\d\d:\d\d/],["HH:mm",/(T| )\d\d:\d\d/],["HH",/(T| )\d\d/]],Za=/([\+\-]|\d\d)/gi,Ba={Milliseconds:1,Seconds:1E3,Minutes:6E4,Hours:36E5,Days:864E5,Months:2592E6,Years:31536E6},Ha={ms:"millisecond",s:"second",m:"minute",h:"hour",d:"day",D:"date",w:"week",W:"isoWeek",
M:"month",Q:"quarter",y:"year",DDD:"dayOfYear",e:"weekday",E:"isoWeekday",gg:"weekYear",GG:"isoWeekYear"},Ia={dayofyear:"dayOfYear",isoweekday:"isoWeekday",isoweek:"isoWeek",weekyear:"weekYear",isoweekyear:"isoWeekYear"},P={},v={s:45,m:45,h:22,d:26,M:11},Ca="DDD w W M D d".split(" "),Da="MDHhmswW".split(""),t={M:function(){return this.month()+1},MMM:function(a){return this.localeData().monthsShort(this,a)},MMMM:function(a){return this.localeData().months(this,a)},D:function(){return this.date()},
DDD:function(){return this.dayOfYear()},d:function(){return this.day()},dd:function(a){return this.localeData().weekdaysMin(this,a)},ddd:function(a){return this.localeData().weekdaysShort(this,a)},dddd:function(a){return this.localeData().weekdays(this,a)},w:function(){return this.week()},W:function(){return this.isoWeek()},YY:function(){return l(this.year()%100,2)},YYYY:function(){return l(this.year(),4)},YYYYY:function(){return l(this.year(),5)},YYYYYY:function(){var a=this.year();return(0<=a?"+":
"-")+l(Math.abs(a),6)},gg:function(){return l(this.weekYear()%100,2)},gggg:function(){return l(this.weekYear(),4)},ggggg:function(){return l(this.weekYear(),5)},GG:function(){return l(this.isoWeekYear()%100,2)},GGGG:function(){return l(this.isoWeekYear(),4)},GGGGG:function(){return l(this.isoWeekYear(),5)},e:function(){return this.weekday()},E:function(){return this.isoWeekday()},a:function(){return this.localeData().meridiem(this.hours(),this.minutes(),!0)},A:function(){return this.localeData().meridiem(this.hours(),
this.minutes(),!1)},H:function(){return this.hours()},h:function(){return this.hours()%12||12},m:function(){return this.minutes()},s:function(){return this.seconds()},S:function(){return k(this.milliseconds()/100)},SS:function(){return l(k(this.milliseconds()/10),2)},SSS:function(){return l(this.milliseconds(),3)},SSSS:function(){return l(this.milliseconds(),3)},Z:function(){var a=this.utcOffset(),b="+";0>a&&(a=-a,b="-");return b+l(k(a/60),2)+":"+l(k(a)%60,2)},ZZ:function(){var a=this.utcOffset(),
b="+";0>a&&(a=-a,b="-");return b+l(k(a/60),2)+l(k(a)%60,2)},z:function(){return this.zoneAbbr()},zz:function(){return this.zoneName()},x:function(){return this.valueOf()},X:function(){return this.unix()},Q:function(){return this.quarter()}},ga={},Ea=["months","monthsShort","weekdays","weekdaysShort","weekdaysMin"],H=!1;Ca.length;)p=Ca.pop(),t[p+"o"]=Fa(t[p],p);for(;Da.length;)p=Da.pop(),t[p+p]=Z(t[p],2);t.DDDD=Z(t.DDD,3);w(aa.prototype,{set:function(a){var b,c;for(c in a)b=a[c],"function"===typeof b?
this[c]=b:this["_"+c]=b;this._ordinalParseLenient=new RegExp(this._ordinalParse.source+"|"+/\d{1,2}/.source)},_months:"January February March April May June July August September October November December".split(" "),months:function(a){return this._months[a.month()]},_monthsShort:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "),monthsShort:function(a){return this._monthsShort[a.month()]},monthsParse:function(a,b,c){var e,g;this._monthsParse||(this._monthsParse=[],this._longMonthsParse=
[],this._shortMonthsParse=[]);for(e=0;12>e;e++)if(g=d.utc([2E3,e]),c&&!this._longMonthsParse[e]&&(this._longMonthsParse[e]=new RegExp("^"+this.months(g,"").replace(".","")+"$","i"),this._shortMonthsParse[e]=new RegExp("^"+this.monthsShort(g,"").replace(".","")+"$","i")),c||this._monthsParse[e]||(g="^"+this.months(g,"")+"|^"+this.monthsShort(g,""),this._monthsParse[e]=new RegExp(g.replace(".",""),"i")),c&&"MMMM"===b&&this._longMonthsParse[e].test(a)||c&&"MMM"===b&&this._shortMonthsParse[e].test(a)||
!c&&this._monthsParse[e].test(a))return e},_weekdays:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),weekdays:function(a){return this._weekdays[a.day()]},_weekdaysShort:"Sun Mon Tue Wed Thu Fri Sat".split(" "),weekdaysShort:function(a){return this._weekdaysShort[a.day()]},_weekdaysMin:"Su Mo Tu We Th Fr Sa".split(" "),weekdaysMin:function(a){return this._weekdaysMin[a.day()]},weekdaysParse:function(a){var b,c;this._weekdaysParse||(this._weekdaysParse=[]);for(b=0;7>b;b++)if(this._weekdaysParse[b]||
(c=d([2E3,1]).day(b),c="^"+this.weekdays(c,"")+"|^"+this.weekdaysShort(c,"")+"|^"+this.weekdaysMin(c,""),this._weekdaysParse[b]=new RegExp(c.replace(".",""),"i")),this._weekdaysParse[b].test(a))return b},_longDateFormat:{LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY LT",LLLL:"dddd, MMMM D, YYYY LT"},longDateFormat:function(a){var b=this._longDateFormat[a];!b&&this._longDateFormat[a.toUpperCase()]&&(b=this._longDateFormat[a.toUpperCase()].replace(/MMMM|MM|DD|dddd/g,
function(a){return a.slice(1)}),this._longDateFormat[a]=b);return b},isPM:function(a){return"p"===(a+"").toLowerCase().charAt(0)},_meridiemParse:/[ap]\.?m?\.?/i,meridiem:function(a,b,c){return 11<a?c?"pm":"PM":c?"am":"AM"},_calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] LT",sameElse:"L"},calendar:function(a,b,c){a=this._calendar[a];return"function"===typeof a?a.apply(b,[c]):a},_relativeTime:{future:"in %s",
past:"%s ago",s:"a few seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},relativeTime:function(a,b,c,e){var d=this._relativeTime[c];return"function"===typeof d?d(a,b,c,e):d.replace(/%d/i,a)},pastFuture:function(a,b){var c=this._relativeTime[0<a?"future":"past"];return"function"===typeof c?c(b):c.replace(/%s/i,b)},ordinal:function(a){return this._ordinal.replace("%d",a)},_ordinal:"%d",_ordinalParse:/\d{1,2}/,
preparse:function(a){return a},postformat:function(a){return a},week:function(a){return y(a,this._week.dow,this._week.doy).week},_week:{dow:0,doy:6},firstDayOfWeek:function(){return this._week.dow},firstDayOfYear:function(){return this._week.doy},_invalidDate:"Invalid date",invalidDate:function(){return this._invalidDate}});d=function(a,b,c,e){var d;"boolean"===typeof c&&(e=c,c=q);d={_isAMomentObject:!0};d._i=a;d._f=b;d._l=c;d._strict=e;d._isUTC=!1;d._pf=G();return xa(d)};d.suppressDeprecationWarnings=
!1;d.createFromInputFallback=m("moment construction falls back to js Date. This is discouraged and will be removed in upcoming major release. Please refer to https://github.com/moment/moment/issues/1407 for more info.",function(a){a._d=new Date(a._i+(a._useUTC?" UTC":""))});d.min=function(){var a=[].slice.call(arguments,0);return ya("isBefore",a)};d.max=function(){var a=[].slice.call(arguments,0);return ya("isAfter",a)};d.utc=function(a,b,c,e){var d;"boolean"===typeof c&&(e=c,c=q);d={_isAMomentObject:!0,
_useUTC:!0,_isUTC:!0};d._l=c;d._i=a;d._f=b;d._strict=e;d._pf=G();return xa(d).utc()};d.unix=function(a){return d(1E3*a)};d.duration=function(a,b){var c=a,e=null,g;d.isDuration(a)?c={ms:a._milliseconds,d:a._days,M:a._months}:"number"===typeof a?(c={},b?c[b]=a:c.milliseconds=a):(e=ib.exec(a))?(g="-"===e[1]?-1:1,c={y:0,d:k(e[2])*g,h:k(e[3])*g,m:k(e[4])*g,s:k(e[5])*g,ms:k(e[6])*g}):(e=jb.exec(a))?(g="-"===e[1]?-1:1,c=function(a){a=a&&parseFloat(a.replace(",","."));return(isNaN(a)?0:a)*g},c={y:c(e[2]),
M:c(e[3]),d:c(e[4]),h:c(e[5]),m:c(e[6]),s:c(e[7]),w:c(e[8])}):null==c?c={}:"object"===typeof c&&("from"in c||"to"in c)&&(e=Ga(d(c.from),d(c.to)),c={},c.ms=e.milliseconds,c.M=e.months);c=new I(c);d.isDuration(a)&&x.call(a,"_locale")&&(c._locale=a._locale);return c};d.version="2.9.0";d.defaultFormat="YYYY-MM-DDTHH:mm:ssZ";d.ISO_8601=function(){};d.momentProperties=D;d.updateOffset=function(){};d.relativeTimeThreshold=function(a,b){if(v[a]===q)return!1;if(b===q)return v[a];v[a]=b;return!0};d.lang=m("moment.lang is deprecated. Use moment.locale instead.",
function(a,b){return d.locale(a,b)});d.locale=function(a,b){var c;a&&(c="undefined"!==typeof b?d.defineLocale(a,b):d.localeData(a))&&(d.duration._locale=d._locale=c);return d._locale._abbr};d.defineLocale=function(a,b){if(null!==b)return b.abbr=a,z[a]||(z[a]=new aa),z[a].set(b),d.locale(a),z[a];delete z[a];return null};d.langData=m("moment.langData is deprecated. Use moment.localeData instead.",function(a){return d.localeData(a)});d.localeData=function(a){var b;a&&a._locale&&a._locale._abbr&&(a=a._locale._abbr);
if(!a)return d._locale;if(!E(a)){if(b=oa(a))return b;a=[a]}a:{b=0;for(var c,e,g,f;b<a.length;){f=na(a[b]).split("-");c=f.length;for(e=(e=na(a[b+1]))?e.split("-"):null;0<c;){if(g=oa(f.slice(0,c).join("-"))){a=g;break a}if(e&&e.length>=c&&ka(f,e,!0)>=c-1)break;c--}b++}a=null}return a};d.isMoment=function(a){return a instanceof C||null!=a&&x.call(a,"_isAMomentObject")};d.isDuration=function(a){return a instanceof I};for(p=Ea.length-1;0<=p;--p)Ja(Ea[p]);d.normalizeUnits=function(a){return n(a)};d.invalid=
function(a){var b=d.utc(NaN);null!=a?w(b._pf,a):b._pf.userInvalidated=!0;return b};d.parseZone=function(){return d.apply(null,arguments).parseZone()};d.parseTwoDigitYear=function(a){return k(a)+(68<k(a)?1900:2E3)};d.isDate=L;w(d.fn=C.prototype,{clone:function(){return d(this)},valueOf:function(){return+this._d-6E4*(this._offset||0)},unix:function(){return Math.floor(+this/1E3)},toString:function(){return this.clone().locale("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")},toDate:function(){return this._offset?
new Date(+this):this._d},toISOString:function(){var a=d(this).utc();return 0<a.year()&&9999>=a.year()?"function"===typeof Date.prototype.toISOString?this.toDate().toISOString():O(a,"YYYY-MM-DD[T]HH:mm:ss.SSS[Z]"):O(a,"YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]")},toArray:function(){return[this.year(),this.month(),this.date(),this.hours(),this.minutes(),this.seconds(),this.milliseconds()]},isValid:function(){return ma(this)},isDSTShifted:function(){return this._a?this.isValid()&&0<ka(this._a,(this._isUTC?d.utc(this._a):
d(this._a)).toArray()):!1},parsingFlags:function(){return w({},this._pf)},invalidAt:function(){return this._pf.overflow},utc:function(a){return this.utcOffset(0,a)},local:function(a){this._isUTC&&(this.utcOffset(0,a),this._isUTC=!1,a&&this.subtract(this._dateUtcOffset(),"m"));return this},format:function(a){a=O(this,a||d.defaultFormat);return this.localeData().postformat(a)},add:fa(1,"add"),subtract:fa(-1,"subtract"),diff:function(a,b,c){a=J(a,this);var e=6E4*(a.utcOffset()-this.utcOffset());b=n(b);
if("year"===b||"month"===b||"quarter"===b){var e=12*(a.year()-this.year())+(a.month()-this.month()),d=this.clone().add(e,"months"),f;0>a-d?(f=this.clone().add(e-1,"months"),a=(a-d)/(d-f)):(f=this.clone().add(e+1,"months"),a=(a-d)/(f-d));a=-(e+a);"quarter"===b?a/=3:"year"===b&&(a/=12)}else a=this-a,a="second"===b?a/1E3:"minute"===b?a/6E4:"hour"===b?a/36E5:"day"===b?(a-e)/864E5:"week"===b?(a-e)/6048E5:a;return c?a:r(a)},from:function(a,b){return d.duration({to:this,from:a}).locale(this.locale()).humanize(!b)},
fromNow:function(a){return this.from(d(),a)},calendar:function(a){a=a||d();var b=J(a,this).startOf("day"),b=this.diff(b,"days",!0),b=-6>b?"sameElse":-1>b?"lastWeek":0>b?"lastDay":1>b?"sameDay":2>b?"nextDay":7>b?"nextWeek":"sameElse";return this.format(this.localeData().calendar(b,this,d(a)))},isLeapYear:function(){return N(this.year())},isDST:function(){return this.utcOffset()>this.clone().month(0).utcOffset()||this.utcOffset()>this.clone().month(5).utcOffset()},day:function(a){var b=this._isUTC?
this._d.getUTCDay():this._d.getDay();if(null!=a){a:{var c=this.localeData();if("string"===typeof a)if(isNaN(a)){if(a=c.weekdaysParse(a),"number"!==typeof a){a=null;break a}}else a=parseInt(a,10)}return this.add(a-b,"d")}return b},month:u("Month",!0),startOf:function(a){a=n(a);switch(a){case "year":this.month(0);case "quarter":case "month":this.date(1);case "week":case "isoWeek":case "day":this.hours(0);case "hour":this.minutes(0);case "minute":this.seconds(0);case "second":this.milliseconds(0)}"week"===
a?this.weekday(0):"isoWeek"===a&&this.isoWeekday(1);"quarter"===a&&this.month(3*Math.floor(this.month()/3));return this},endOf:function(a){a=n(a);return a===q||"millisecond"===a?this:this.startOf(a).add(1,"isoWeek"===a?"week":a).subtract(1,"ms")},isAfter:function(a,b){b=n("undefined"!==typeof b?b:"millisecond");return"millisecond"===b?(a=d.isMoment(a)?a:d(a),+this>+a):(d.isMoment(a)?+a:+d(a))<+this.clone().startOf(b)},isBefore:function(a,b){var c;b=n("undefined"!==typeof b?b:"millisecond");if("millisecond"===
b)return a=d.isMoment(a)?a:d(a),+this<+a;c=d.isMoment(a)?+a:+d(a);return+this.clone().endOf(b)<c},isBetween:function(a,b,c){return this.isAfter(a,c)&&this.isBefore(b,c)},isSame:function(a,b){var c;b=n(b||"millisecond");if("millisecond"===b)return a=d.isMoment(a)?a:d(a),+this===+a;c=+d(a);return+this.clone().startOf(b)<=c&&c<=+this.clone().endOf(b)},min:m("moment().min is deprecated, use moment.min instead. https://github.com/moment/moment/issues/1548",function(a){a=d.apply(null,arguments);return a<
this?this:a}),max:m("moment().max is deprecated, use moment.max instead. https://github.com/moment/moment/issues/1548",function(a){a=d.apply(null,arguments);return a>this?this:a}),zone:m("moment().zone is deprecated, use moment().utcOffset instead. https://github.com/moment/moment/issues/1779",function(a,b){return null!=a?("string"!==typeof a&&(a=-a),this.utcOffset(a,b),this):-this.utcOffset()}),utcOffset:function(a,b){var c=this._offset||0,e;return null!=a?("string"===typeof a&&(a=R(a)),16>Math.abs(a)&&
(a*=60),!this._isUTC&&b&&(e=this._dateUtcOffset()),this._offset=a,this._isUTC=!0,null!=e&&this.add(e,"m"),c!==a&&(!b||this._changeInProgress?ha(this,d.duration(a-c,"m"),1,!1):this._changeInProgress||(this._changeInProgress=!0,d.updateOffset(this,!0),this._changeInProgress=null)),this):this._isUTC?c:this._dateUtcOffset()},isLocal:function(){return!this._isUTC},isUtcOffset:function(){return this._isUTC},isUtc:function(){return this._isUTC&&0===this._offset},zoneAbbr:function(){return this._isUTC?"UTC":
""},zoneName:function(){return this._isUTC?"Coordinated Universal Time":""},parseZone:function(){this._tzm?this.utcOffset(this._tzm):"string"===typeof this._i&&this.utcOffset(R(this._i));return this},hasAlignedHourOffset:function(a){a=a?d(a).utcOffset():0;return 0===(this.utcOffset()-a)%60},daysInMonth:function(){return M(this.year(),this.month())},dayOfYear:function(a){var b=A((d(this).startOf("day")-d(this).startOf("year"))/864E5)+1;return null==a?b:this.add(a-b,"d")},quarter:function(a){return null==
a?Math.ceil((this.month()+1)/3):this.month(3*(a-1)+this.month()%3)},weekYear:function(a){var b=y(this,this.localeData()._week.dow,this.localeData()._week.doy).year;return null==a?b:this.add(a-b,"y")},isoWeekYear:function(a){var b=y(this,1,4).year;return null==a?b:this.add(a-b,"y")},week:function(a){var b=this.localeData().week(this);return null==a?b:this.add(7*(a-b),"d")},isoWeek:function(a){var b=y(this,1,4).week;return null==a?b:this.add(7*(a-b),"d")},weekday:function(a){var b=(this.day()+7-this.localeData()._week.dow)%
7;return null==a?b:this.add(a-b,"d")},isoWeekday:function(a){return null==a?this.day()||7:this.day(this.day()%7?a:a-7)},isoWeeksInYear:function(){return la(this.year(),1,4)},weeksInYear:function(){var a=this.localeData()._week;return la(this.year(),a.dow,a.doy)},get:function(a){a=n(a);return this[a]()},set:function(a,b){var c;if("object"===typeof a)for(c in a)this.set(c,a[c]);else if(a=n(a),"function"===typeof this[a])this[a](b);return this},locale:function(a){if(a===q)return this._locale._abbr;a=
d.localeData(a);null!=a&&(this._locale=a);return this},lang:m("moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. Use moment().locale() to change languages.",function(a){return a===q?this.localeData():this.locale(a)}),localeData:function(){return this._locale},_dateUtcOffset:function(){return 15*-Math.round(this._d.getTimezoneOffset()/15)}});d.fn.millisecond=d.fn.milliseconds=u("Milliseconds",!1);d.fn.second=d.fn.seconds=u("Seconds",!1);d.fn.minute=
d.fn.minutes=u("Minutes",!1);d.fn.hour=d.fn.hours=u("Hours",!0);d.fn.date=u("Date",!0);d.fn.dates=m("dates accessor is deprecated. Use date instead.",u("Date",!0));d.fn.year=u("FullYear",!0);d.fn.years=m("years accessor is deprecated. Use year instead.",u("FullYear",!0));d.fn.days=d.fn.day;d.fn.months=d.fn.month;d.fn.weeks=d.fn.week;d.fn.isoWeeks=d.fn.isoWeek;d.fn.quarters=d.fn.quarter;d.fn.toJSON=d.fn.toISOString;d.fn.isUTC=d.fn.isUtc;w(d.duration.fn=I.prototype,{_bubble:function(){var a=this._milliseconds,
b=this._days,c=this._months,e=this._data,d=0;e.milliseconds=a%1E3;a=r(a/1E3);e.seconds=a%60;a=r(a/60);e.minutes=a%60;a=r(a/60);e.hours=a%24;b+=r(a/24);d=r(400*b/146097);b-=r(146097*d/400);c+=r(b/30);b%=30;d+=r(c/12);e.days=b;e.months=c%12;e.years=d},abs:function(){this._milliseconds=Math.abs(this._milliseconds);this._days=Math.abs(this._days);this._months=Math.abs(this._months);this._data.milliseconds=Math.abs(this._data.milliseconds);this._data.seconds=Math.abs(this._data.seconds);this._data.minutes=
Math.abs(this._data.minutes);this._data.hours=Math.abs(this._data.hours);this._data.months=Math.abs(this._data.months);this._data.years=Math.abs(this._data.years);return this},weeks:function(){return r(this.days()/7)},valueOf:function(){return this._milliseconds+864E5*this._days+this._months%12*2592E6+31536E6*k(this._months/12)},humanize:function(a){var b;b=!a;var c=this.localeData(),e=d.duration(this).abs(),g=A(e.as("s")),f=A(e.as("m")),h=A(e.as("h")),k=A(e.as("d")),l=A(e.as("M")),e=A(e.as("y")),
g=g<v.s&&["s",g]||1===f&&["m"]||f<v.m&&["mm",f]||1===h&&["h"]||h<v.h&&["hh",h]||1===k&&["d"]||k<v.d&&["dd",k]||1===l&&["M"]||l<v.M&&["MM",l]||1===e&&["y"]||["yy",e];g[2]=b;g[3]=0<+this;g[4]=c;b=gb.apply({},g);a&&(b=this.localeData().pastFuture(+this,b));return this.localeData().postformat(b)},add:function(a,b){var c=d.duration(a,b);this._milliseconds+=c._milliseconds;this._days+=c._days;this._months+=c._months;this._bubble();return this},subtract:function(a,b){var c=d.duration(a,b);this._milliseconds-=
c._milliseconds;this._days-=c._days;this._months-=c._months;this._bubble();return this},get:function(a){a=n(a);return this[a.toLowerCase()+"s"]()},as:function(a){var b;a=n(a);if("month"===a||"year"===a)return b=this._days+this._milliseconds/864E5,b=this._months+400*b/146097*12,"month"===a?b:b/12;b=this._days+Math.round(this._months/12*146097/400);switch(a){case "week":return b/7+this._milliseconds/6048E5;case "day":return b+this._milliseconds/864E5;case "hour":return 24*b+this._milliseconds/36E5;
case "minute":return 1440*b+this._milliseconds/6E4;case "second":return 86400*b+this._milliseconds/1E3;case "millisecond":return Math.floor(864E5*b)+this._milliseconds;default:throw Error("Unknown unit "+a);}},lang:d.fn.lang,locale:d.fn.locale,toIsoString:m("toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)",function(){return this.toISOString()}),toISOString:function(){var a=Math.abs(this.years()),b=Math.abs(this.months()),c=Math.abs(this.days()),e=Math.abs(this.hours()),
d=Math.abs(this.minutes()),f=Math.abs(this.seconds()+this.milliseconds()/1E3);return this.asSeconds()?(0>this.asSeconds()?"-":"")+"P"+(a?a+"Y":"")+(b?b+"M":"")+(c?c+"D":"")+(e||d||f?"T":"")+(e?e+"H":"")+(d?d+"M":"")+(f?f+"S":""):"P0D"},localeData:function(){return this._locale},toJSON:function(){return this.toISOString()}});d.duration.fn.toString=d.duration.fn.toISOString;for(p in Ba)x.call(Ba,p)&&hb(p.toLowerCase());d.duration.fn.asMilliseconds=function(){return this.as("ms")};d.duration.fn.asSeconds=
function(){return this.as("s")};d.duration.fn.asMinutes=function(){return this.as("m")};d.duration.fn.asHours=function(){return this.as("h")};d.duration.fn.asDays=function(){return this.as("d")};d.duration.fn.asWeeks=function(){return this.as("weeks")};d.duration.fn.asMonths=function(){return this.as("M")};d.duration.fn.asYears=function(){return this.as("y")};d.locale("en",{ordinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(a){var b=a%10,b=1===k(a%100/10)?"th":1===b?"st":2===b?"nd":3===b?"rd":"th";
return a+b}});pa?module.exports=d:"function"===typeof define&&define.amd?(define(function(a,b,c){c.config&&c.config()&&!0===c.config().noGlobal&&(X.moment=Aa);return d}),za(!0)):za()}).call(this);
/*!
DataTables 1.10.12
©2008-2014 SpryMedia Ltd - datatables.net/license
*/
/*
* This combined file was created by the DataTables downloader builder:
* https://datatables.net/download
*
* To update or modify this file with the latest versions of the included
* software please visit:
* https://datatables.net/download/#dt/dt-1.10.12
*
* Included libraries:
* DataTables 1.10.12
*/
/*!
DataTables 1.10.12
©2008-2015 SpryMedia Ltd - datatables.net/license
*/
(function(h){"function"===typeof define&&define.amd?define(["jquery"],function(D){return h(D,window,document)}):"object"===typeof exports?module.exports=function(D,I){D||(D=window);I||(I="undefined"!==typeof window?require("jquery"):require("jquery")(D));return h(I,D,D.document)}:h(jQuery,window,document)})(function(h,D,I,k){function X(a){var b,c,d={};h.each(a,function(e){if((b=e.match(/^([^A-Z]+?)([A-Z])/))&&-1!=="a aa ai ao as b fn i m o s ".indexOf(b[1]+" "))c=e.replace(b[0],b[2].toLowerCase()),
d[c]=e,"o"===b[1]&&X(a[e])});a._hungarianMap=d}function K(a,b,c){a._hungarianMap||X(a);var d;h.each(b,function(e){d=a._hungarianMap[e];if(d!==k&&(c||b[d]===k))"o"===d.charAt(0)?(b[d]||(b[d]={}),h.extend(!0,b[d],b[e]),K(a[d],b[d],c)):b[d]=b[e]})}function Da(a){var b=m.defaults.oLanguage,c=a.sZeroRecords;!a.sEmptyTable&&(c&&"No data available in table"===b.sEmptyTable)&&E(a,a,"sZeroRecords","sEmptyTable");!a.sLoadingRecords&&(c&&"Loading..."===b.sLoadingRecords)&&E(a,a,"sZeroRecords","sLoadingRecords");
a.sInfoThousands&&(a.sThousands=a.sInfoThousands);(a=a.sDecimal)&&db(a)}function eb(a){A(a,"ordering","bSort");A(a,"orderMulti","bSortMulti");A(a,"orderClasses","bSortClasses");A(a,"orderCellsTop","bSortCellsTop");A(a,"order","aaSorting");A(a,"orderFixed","aaSortingFixed");A(a,"paging","bPaginate");A(a,"pagingType","sPaginationType");A(a,"pageLength","iDisplayLength");A(a,"searching","bFilter");"boolean"===typeof a.sScrollX&&(a.sScrollX=a.sScrollX?"100%":"");"boolean"===typeof a.scrollX&&(a.scrollX=
a.scrollX?"100%":"");if(a=a.aoSearchCols)for(var b=0,c=a.length;b<c;b++)a[b]&&K(m.models.oSearch,a[b])}function fb(a){A(a,"orderable","bSortable");A(a,"orderData","aDataSort");A(a,"orderSequence","asSorting");A(a,"orderDataType","sortDataType");var b=a.aDataSort;b&&!h.isArray(b)&&(a.aDataSort=[b])}function gb(a){if(!m.__browser){var b={};m.__browser=b;var c=h("<div/>").css({position:"fixed",top:0,left:0,height:1,width:1,overflow:"hidden"}).append(h("<div/>").css({position:"absolute",top:1,left:1,
width:100,overflow:"scroll"}).append(h("<div/>").css({width:"100%",height:10}))).appendTo("body"),d=c.children(),e=d.children();b.barWidth=d[0].offsetWidth-d[0].clientWidth;b.bScrollOversize=100===e[0].offsetWidth&&100!==d[0].clientWidth;b.bScrollbarLeft=1!==Math.round(e.offset().left);b.bBounding=c[0].getBoundingClientRect().width?!0:!1;c.remove()}h.extend(a.oBrowser,m.__browser);a.oScroll.iBarWidth=m.__browser.barWidth}function hb(a,b,c,d,e,f){var g,j=!1;c!==k&&(g=c,j=!0);for(;d!==e;)a.hasOwnProperty(d)&&
(g=j?b(g,a[d],d,a):a[d],j=!0,d+=f);return g}function Ea(a,b){var c=m.defaults.column,d=a.aoColumns.length,c=h.extend({},m.models.oColumn,c,{nTh:b?b:I.createElement("th"),sTitle:c.sTitle?c.sTitle:b?b.innerHTML:"",aDataSort:c.aDataSort?c.aDataSort:[d],mData:c.mData?c.mData:d,idx:d});a.aoColumns.push(c);c=a.aoPreSearchCols;c[d]=h.extend({},m.models.oSearch,c[d]);ja(a,d,h(b).data())}function ja(a,b,c){var b=a.aoColumns[b],d=a.oClasses,e=h(b.nTh);if(!b.sWidthOrig){b.sWidthOrig=e.attr("width")||null;var f=
(e.attr("style")||"").match(/width:\s*(\d+[pxem%]+)/);f&&(b.sWidthOrig=f[1])}c!==k&&null!==c&&(fb(c),K(m.defaults.column,c),c.mDataProp!==k&&!c.mData&&(c.mData=c.mDataProp),c.sType&&(b._sManualType=c.sType),c.className&&!c.sClass&&(c.sClass=c.className),h.extend(b,c),E(b,c,"sWidth","sWidthOrig"),c.iDataSort!==k&&(b.aDataSort=[c.iDataSort]),E(b,c,"aDataSort"));var g=b.mData,j=Q(g),i=b.mRender?Q(b.mRender):null,c=function(a){return"string"===typeof a&&-1!==a.indexOf("@")};b._bAttrSrc=h.isPlainObject(g)&&
(c(g.sort)||c(g.type)||c(g.filter));b._setter=null;b.fnGetData=function(a,b,c){var d=j(a,b,k,c);return i&&b?i(d,b,a,c):d};b.fnSetData=function(a,b,c){return R(g)(a,b,c)};"number"!==typeof g&&(a._rowReadObject=!0);a.oFeatures.bSort||(b.bSortable=!1,e.addClass(d.sSortableNone));a=-1!==h.inArray("asc",b.asSorting);c=-1!==h.inArray("desc",b.asSorting);!b.bSortable||!a&&!c?(b.sSortingClass=d.sSortableNone,b.sSortingClassJUI=""):a&&!c?(b.sSortingClass=d.sSortableAsc,b.sSortingClassJUI=d.sSortJUIAscAllowed):
!a&&c?(b.sSortingClass=d.sSortableDesc,b.sSortingClassJUI=d.sSortJUIDescAllowed):(b.sSortingClass=d.sSortable,b.sSortingClassJUI=d.sSortJUI)}function Y(a){if(!1!==a.oFeatures.bAutoWidth){var b=a.aoColumns;Fa(a);for(var c=0,d=b.length;c<d;c++)b[c].nTh.style.width=b[c].sWidth}b=a.oScroll;(""!==b.sY||""!==b.sX)&&ka(a);u(a,null,"column-sizing",[a])}function Z(a,b){var c=la(a,"bVisible");return"number"===typeof c[b]?c[b]:null}function $(a,b){var c=la(a,"bVisible"),c=h.inArray(b,c);return-1!==c?c:null}
function aa(a){var b=0;h.each(a.aoColumns,function(a,d){d.bVisible&&"none"!==h(d.nTh).css("display")&&b++});return b}function la(a,b){var c=[];h.map(a.aoColumns,function(a,e){a[b]&&c.push(e)});return c}function Ga(a){var b=a.aoColumns,c=a.aoData,d=m.ext.type.detect,e,f,g,j,i,h,l,q,t;e=0;for(f=b.length;e<f;e++)if(l=b[e],t=[],!l.sType&&l._sManualType)l.sType=l._sManualType;else if(!l.sType){g=0;for(j=d.length;g<j;g++){i=0;for(h=c.length;i<h;i++){t[i]===k&&(t[i]=B(a,i,e,"type"));q=d[g](t[i],a);if(!q&&
g!==d.length-1)break;if("html"===q)break}if(q){l.sType=q;break}}l.sType||(l.sType="string")}}function ib(a,b,c,d){var e,f,g,j,i,n,l=a.aoColumns;if(b)for(e=b.length-1;0<=e;e--){n=b[e];var q=n.targets!==k?n.targets:n.aTargets;h.isArray(q)||(q=[q]);f=0;for(g=q.length;f<g;f++)if("number"===typeof q[f]&&0<=q[f]){for(;l.length<=q[f];)Ea(a);d(q[f],n)}else if("number"===typeof q[f]&&0>q[f])d(l.length+q[f],n);else if("string"===typeof q[f]){j=0;for(i=l.length;j<i;j++)("_all"==q[f]||h(l[j].nTh).hasClass(q[f]))&&
d(j,n)}}if(c){e=0;for(a=c.length;e<a;e++)d(e,c[e])}}function N(a,b,c,d){var e=a.aoData.length,f=h.extend(!0,{},m.models.oRow,{src:c?"dom":"data",idx:e});f._aData=b;a.aoData.push(f);for(var g=a.aoColumns,j=0,i=g.length;j<i;j++)g[j].sType=null;a.aiDisplayMaster.push(e);b=a.rowIdFn(b);b!==k&&(a.aIds[b]=f);(c||!a.oFeatures.bDeferRender)&&Ha(a,e,c,d);return e}function ma(a,b){var c;b instanceof h||(b=h(b));return b.map(function(b,e){c=Ia(a,e);return N(a,c.data,e,c.cells)})}function B(a,b,c,d){var e=a.iDraw,
f=a.aoColumns[c],g=a.aoData[b]._aData,j=f.sDefaultContent,i=f.fnGetData(g,d,{settings:a,row:b,col:c});if(i===k)return a.iDrawError!=e&&null===j&&(L(a,0,"Requested unknown parameter "+("function"==typeof f.mData?"{function}":"'"+f.mData+"'")+" for row "+b+", column "+c,4),a.iDrawError=e),j;if((i===g||null===i)&&null!==j&&d!==k)i=j;else if("function"===typeof i)return i.call(g);return null===i&&"display"==d?"":i}function jb(a,b,c,d){a.aoColumns[c].fnSetData(a.aoData[b]._aData,d,{settings:a,row:b,col:c})}
function Ja(a){return h.map(a.match(/(\\.|[^\.])+/g)||[""],function(a){return a.replace(/\\./g,".")})}function Q(a){if(h.isPlainObject(a)){var b={};h.each(a,function(a,c){c&&(b[a]=Q(c))});return function(a,c,f,g){var j=b[c]||b._;return j!==k?j(a,c,f,g):a}}if(null===a)return function(a){return a};if("function"===typeof a)return function(b,c,f,g){return a(b,c,f,g)};if("string"===typeof a&&(-1!==a.indexOf(".")||-1!==a.indexOf("[")||-1!==a.indexOf("("))){var c=function(a,b,f){var g,j;if(""!==f){j=Ja(f);
for(var i=0,n=j.length;i<n;i++){f=j[i].match(ba);g=j[i].match(U);if(f){j[i]=j[i].replace(ba,"");""!==j[i]&&(a=a[j[i]]);g=[];j.splice(0,i+1);j=j.join(".");if(h.isArray(a)){i=0;for(n=a.length;i<n;i++)g.push(c(a[i],b,j))}a=f[0].substring(1,f[0].length-1);a=""===a?g:g.join(a);break}else if(g){j[i]=j[i].replace(U,"");a=a[j[i]]();continue}if(null===a||a[j[i]]===k)return k;a=a[j[i]]}}return a};return function(b,e){return c(b,e,a)}}return function(b){return b[a]}}function R(a){if(h.isPlainObject(a))return R(a._);
if(null===a)return function(){};if("function"===typeof a)return function(b,d,e){a(b,"set",d,e)};if("string"===typeof a&&(-1!==a.indexOf(".")||-1!==a.indexOf("[")||-1!==a.indexOf("("))){var b=function(a,d,e){var e=Ja(e),f;f=e[e.length-1];for(var g,j,i=0,n=e.length-1;i<n;i++){g=e[i].match(ba);j=e[i].match(U);if(g){e[i]=e[i].replace(ba,"");a[e[i]]=[];f=e.slice();f.splice(0,i+1);g=f.join(".");if(h.isArray(d)){j=0;for(n=d.length;j<n;j++)f={},b(f,d[j],g),a[e[i]].push(f)}else a[e[i]]=d;return}j&&(e[i]=e[i].replace(U,
""),a=a[e[i]](d));if(null===a[e[i]]||a[e[i]]===k)a[e[i]]={};a=a[e[i]]}if(f.match(U))a[f.replace(U,"")](d);else a[f.replace(ba,"")]=d};return function(c,d){return b(c,d,a)}}return function(b,d){b[a]=d}}function Ka(a){return G(a.aoData,"_aData")}function na(a){a.aoData.length=0;a.aiDisplayMaster.length=0;a.aiDisplay.length=0;a.aIds={}}function oa(a,b,c){for(var d=-1,e=0,f=a.length;e<f;e++)a[e]==b?d=e:a[e]>b&&a[e]--; -1!=d&&c===k&&a.splice(d,1)}function ca(a,b,c,d){var e=a.aoData[b],f,g=function(c,d){for(;c.childNodes.length;)c.removeChild(c.firstChild);
c.innerHTML=B(a,b,d,"display")};if("dom"===c||(!c||"auto"===c)&&"dom"===e.src)e._aData=Ia(a,e,d,d===k?k:e._aData).data;else{var j=e.anCells;if(j)if(d!==k)g(j[d],d);else{c=0;for(f=j.length;c<f;c++)g(j[c],c)}}e._aSortData=null;e._aFilterData=null;g=a.aoColumns;if(d!==k)g[d].sType=null;else{c=0;for(f=g.length;c<f;c++)g[c].sType=null;La(a,e)}}function Ia(a,b,c,d){var e=[],f=b.firstChild,g,j,i=0,n,l=a.aoColumns,q=a._rowReadObject,d=d!==k?d:q?{}:[],t=function(a,b){if("string"===typeof a){var c=a.indexOf("@");
-1!==c&&(c=a.substring(c+1),R(a)(d,b.getAttribute(c)))}},S=function(a){if(c===k||c===i)j=l[i],n=h.trim(a.innerHTML),j&&j._bAttrSrc?(R(j.mData._)(d,n),t(j.mData.sort,a),t(j.mData.type,a),t(j.mData.filter,a)):q?(j._setter||(j._setter=R(j.mData)),j._setter(d,n)):d[i]=n;i++};if(f)for(;f;){g=f.nodeName.toUpperCase();if("TD"==g||"TH"==g)S(f),e.push(f);f=f.nextSibling}else{e=b.anCells;f=0;for(g=e.length;f<g;f++)S(e[f])}if(b=b.firstChild?b:b.nTr)(b=b.getAttribute("id"))&&R(a.rowId)(d,b);return{data:d,cells:e}}
function Ha(a,b,c,d){var e=a.aoData[b],f=e._aData,g=[],j,i,n,l,q;if(null===e.nTr){j=c||I.createElement("tr");e.nTr=j;e.anCells=g;j._DT_RowIndex=b;La(a,e);l=0;for(q=a.aoColumns.length;l<q;l++){n=a.aoColumns[l];i=c?d[l]:I.createElement(n.sCellType);i._DT_CellIndex={row:b,column:l};g.push(i);if((!c||n.mRender||n.mData!==l)&&(!h.isPlainObject(n.mData)||n.mData._!==l+".display"))i.innerHTML=B(a,b,l,"display");n.sClass&&(i.className+=" "+n.sClass);n.bVisible&&!c?j.appendChild(i):!n.bVisible&&c&&i.parentNode.removeChild(i);
n.fnCreatedCell&&n.fnCreatedCell.call(a.oInstance,i,B(a,b,l),f,b,l)}u(a,"aoRowCreatedCallback",null,[j,f,b])}e.nTr.setAttribute("role","row")}function La(a,b){var c=b.nTr,d=b._aData;if(c){var e=a.rowIdFn(d);e&&(c.id=e);d.DT_RowClass&&(e=d.DT_RowClass.split(" "),b.__rowc=b.__rowc?pa(b.__rowc.concat(e)):e,h(c).removeClass(b.__rowc.join(" ")).addClass(d.DT_RowClass));d.DT_RowAttr&&h(c).attr(d.DT_RowAttr);d.DT_RowData&&h(c).data(d.DT_RowData)}}function kb(a){var b,c,d,e,f,g=a.nTHead,j=a.nTFoot,i=0===
h("th, td",g).length,n=a.oClasses,l=a.aoColumns;i&&(e=h("<tr/>").appendTo(g));b=0;for(c=l.length;b<c;b++)f=l[b],d=h(f.nTh).addClass(f.sClass),i&&d.appendTo(e),a.oFeatures.bSort&&(d.addClass(f.sSortingClass),!1!==f.bSortable&&(d.attr("tabindex",a.iTabIndex).attr("aria-controls",a.sTableId),Ma(a,f.nTh,b))),f.sTitle!=d[0].innerHTML&&d.html(f.sTitle),Na(a,"header")(a,d,f,n);i&&da(a.aoHeader,g);h(g).find(">tr").attr("role","row");h(g).find(">tr>th, >tr>td").addClass(n.sHeaderTH);h(j).find(">tr>th, >tr>td").addClass(n.sFooterTH);
if(null!==j){a=a.aoFooter[0];b=0;for(c=a.length;b<c;b++)f=l[b],f.nTf=a[b].cell,f.sClass&&h(f.nTf).addClass(f.sClass)}}function ea(a,b,c){var d,e,f,g=[],j=[],i=a.aoColumns.length,n;if(b){c===k&&(c=!1);d=0;for(e=b.length;d<e;d++){g[d]=b[d].slice();g[d].nTr=b[d].nTr;for(f=i-1;0<=f;f--)!a.aoColumns[f].bVisible&&!c&&g[d].splice(f,1);j.push([])}d=0;for(e=g.length;d<e;d++){if(a=g[d].nTr)for(;f=a.firstChild;)a.removeChild(f);f=0;for(b=g[d].length;f<b;f++)if(n=i=1,j[d][f]===k){a.appendChild(g[d][f].cell);
for(j[d][f]=1;g[d+i]!==k&&g[d][f].cell==g[d+i][f].cell;)j[d+i][f]=1,i++;for(;g[d][f+n]!==k&&g[d][f].cell==g[d][f+n].cell;){for(c=0;c<i;c++)j[d+c][f+n]=1;n++}h(g[d][f].cell).attr("rowspan",i).attr("colspan",n)}}}}function O(a){var b=u(a,"aoPreDrawCallback","preDraw",[a]);if(-1!==h.inArray(!1,b))C(a,!1);else{var b=[],c=0,d=a.asStripeClasses,e=d.length,f=a.oLanguage,g=a.iInitDisplayStart,j="ssp"==y(a),i=a.aiDisplay;a.bDrawing=!0;g!==k&&-1!==g&&(a._iDisplayStart=j?g:g>=a.fnRecordsDisplay()?0:g,a.iInitDisplayStart=
-1);var g=a._iDisplayStart,n=a.fnDisplayEnd();if(a.bDeferLoading)a.bDeferLoading=!1,a.iDraw++,C(a,!1);else if(j){if(!a.bDestroying&&!lb(a))return}else a.iDraw++;if(0!==i.length){f=j?a.aoData.length:n;for(j=j?0:g;j<f;j++){var l=i[j],q=a.aoData[l];null===q.nTr&&Ha(a,l);l=q.nTr;if(0!==e){var t=d[c%e];q._sRowStripe!=t&&(h(l).removeClass(q._sRowStripe).addClass(t),q._sRowStripe=t)}u(a,"aoRowCallback",null,[l,q._aData,c,j]);b.push(l);c++}}else c=f.sZeroRecords,1==a.iDraw&&"ajax"==y(a)?c=f.sLoadingRecords:
f.sEmptyTable&&0===a.fnRecordsTotal()&&(c=f.sEmptyTable),b[0]=h("<tr/>",{"class":e?d[0]:""}).append(h("<td />",{valign:"top",colSpan:aa(a),"class":a.oClasses.sRowEmpty}).html(c))[0];u(a,"aoHeaderCallback","header",[h(a.nTHead).children("tr")[0],Ka(a),g,n,i]);u(a,"aoFooterCallback","footer",[h(a.nTFoot).children("tr")[0],Ka(a),g,n,i]);d=h(a.nTBody);d.children().detach();d.append(h(b));u(a,"aoDrawCallback","draw",[a]);a.bSorted=!1;a.bFiltered=!1;a.bDrawing=!1}}function T(a,b){var c=a.oFeatures,d=c.bFilter;
c.bSort&&mb(a);d?fa(a,a.oPreviousSearch):a.aiDisplay=a.aiDisplayMaster.slice();!0!==b&&(a._iDisplayStart=0);a._drawHold=b;O(a);a._drawHold=!1}function nb(a){var b=a.oClasses,c=h(a.nTable),c=h("<div/>").insertBefore(c),d=a.oFeatures,e=h("<div/>",{id:a.sTableId+"_wrapper","class":b.sWrapper+(a.nTFoot?"":" "+b.sNoFooter)});a.nHolding=c[0];a.nTableWrapper=e[0];a.nTableReinsertBefore=a.nTable.nextSibling;for(var f=a.sDom.split(""),g,j,i,n,l,q,t=0;t<f.length;t++){g=null;j=f[t];if("<"==j){i=h("<div/>")[0];
n=f[t+1];if("'"==n||'"'==n){l="";for(q=2;f[t+q]!=n;)l+=f[t+q],q++;"H"==l?l=b.sJUIHeader:"F"==l&&(l=b.sJUIFooter);-1!=l.indexOf(".")?(n=l.split("."),i.id=n[0].substr(1,n[0].length-1),i.className=n[1]):"#"==l.charAt(0)?i.id=l.substr(1,l.length-1):i.className=l;t+=q}e.append(i);e=h(i)}else if(">"==j)e=e.parent();else if("l"==j&&d.bPaginate&&d.bLengthChange)g=ob(a);else if("f"==j&&d.bFilter)g=pb(a);else if("r"==j&&d.bProcessing)g=qb(a);else if("t"==j)g=rb(a);else if("i"==j&&d.bInfo)g=sb(a);else if("p"==
j&&d.bPaginate)g=tb(a);else if(0!==m.ext.feature.length){i=m.ext.feature;q=0;for(n=i.length;q<n;q++)if(j==i[q].cFeature){g=i[q].fnInit(a);break}}g&&(i=a.aanFeatures,i[j]||(i[j]=[]),i[j].push(g),e.append(g))}c.replaceWith(e);a.nHolding=null}function da(a,b){var c=h(b).children("tr"),d,e,f,g,j,i,n,l,q,t;a.splice(0,a.length);f=0;for(i=c.length;f<i;f++)a.push([]);f=0;for(i=c.length;f<i;f++){d=c[f];for(e=d.firstChild;e;){if("TD"==e.nodeName.toUpperCase()||"TH"==e.nodeName.toUpperCase()){l=1*e.getAttribute("colspan");
q=1*e.getAttribute("rowspan");l=!l||0===l||1===l?1:l;q=!q||0===q||1===q?1:q;g=0;for(j=a[f];j[g];)g++;n=g;t=1===l?!0:!1;for(j=0;j<l;j++)for(g=0;g<q;g++)a[f+g][n+j]={cell:e,unique:t},a[f+g].nTr=d}e=e.nextSibling}}}function qa(a,b,c){var d=[];c||(c=a.aoHeader,b&&(c=[],da(c,b)));for(var b=0,e=c.length;b<e;b++)for(var f=0,g=c[b].length;f<g;f++)if(c[b][f].unique&&(!d[f]||!a.bSortCellsTop))d[f]=c[b][f].cell;return d}function ra(a,b,c){u(a,"aoServerParams","serverParams",[b]);if(b&&h.isArray(b)){var d={},
e=/(.*?)\[\]$/;h.each(b,function(a,b){var c=b.name.match(e);c?(c=c[0],d[c]||(d[c]=[]),d[c].push(b.value)):d[b.name]=b.value});b=d}var f,g=a.ajax,j=a.oInstance,i=function(b){u(a,null,"xhr",[a,b,a.jqXHR]);c(b)};if(h.isPlainObject(g)&&g.data){f=g.data;var n=h.isFunction(f)?f(b,a):f,b=h.isFunction(f)&&n?n:h.extend(!0,b,n);delete g.data}n={data:b,success:function(b){var c=b.error||b.sError;c&&L(a,0,c);a.json=b;i(b)},dataType:"json",cache:!1,type:a.sServerMethod,error:function(b,c){var d=u(a,null,"xhr",
[a,null,a.jqXHR]);-1===h.inArray(!0,d)&&("parsererror"==c?L(a,0,"Invalid JSON response",1):4===b.readyState&&L(a,0,"Ajax error",7));C(a,!1)}};a.oAjaxData=b;u(a,null,"preXhr",[a,b]);a.fnServerData?a.fnServerData.call(j,a.sAjaxSource,h.map(b,function(a,b){return{name:b,value:a}}),i,a):a.sAjaxSource||"string"===typeof g?a.jqXHR=h.ajax(h.extend(n,{url:g||a.sAjaxSource})):h.isFunction(g)?a.jqXHR=g.call(j,b,i,a):(a.jqXHR=h.ajax(h.extend(n,g)),g.data=f)}function lb(a){return a.bAjaxDataGet?(a.iDraw++,C(a,
!0),ra(a,ub(a),function(b){vb(a,b)}),!1):!0}function ub(a){var b=a.aoColumns,c=b.length,d=a.oFeatures,e=a.oPreviousSearch,f=a.aoPreSearchCols,g,j=[],i,n,l,q=V(a);g=a._iDisplayStart;i=!1!==d.bPaginate?a._iDisplayLength:-1;var k=function(a,b){j.push({name:a,value:b})};k("sEcho",a.iDraw);k("iColumns",c);k("sColumns",G(b,"sName").join(","));k("iDisplayStart",g);k("iDisplayLength",i);var S={draw:a.iDraw,columns:[],order:[],start:g,length:i,search:{value:e.sSearch,regex:e.bRegex}};for(g=0;g<c;g++)n=b[g],
l=f[g],i="function"==typeof n.mData?"function":n.mData,S.columns.push({data:i,name:n.sName,searchable:n.bSearchable,orderable:n.bSortable,search:{value:l.sSearch,regex:l.bRegex}}),k("mDataProp_"+g,i),d.bFilter&&(k("sSearch_"+g,l.sSearch),k("bRegex_"+g,l.bRegex),k("bSearchable_"+g,n.bSearchable)),d.bSort&&k("bSortable_"+g,n.bSortable);d.bFilter&&(k("sSearch",e.sSearch),k("bRegex",e.bRegex));d.bSort&&(h.each(q,function(a,b){S.order.push({column:b.col,dir:b.dir});k("iSortCol_"+a,b.col);k("sSortDir_"+
a,b.dir)}),k("iSortingCols",q.length));b=m.ext.legacy.ajax;return null===b?a.sAjaxSource?j:S:b?j:S}function vb(a,b){var c=sa(a,b),d=b.sEcho!==k?b.sEcho:b.draw,e=b.iTotalRecords!==k?b.iTotalRecords:b.recordsTotal,f=b.iTotalDisplayRecords!==k?b.iTotalDisplayRecords:b.recordsFiltered;if(d){if(1*d<a.iDraw)return;a.iDraw=1*d}na(a);a._iRecordsTotal=parseInt(e,10);a._iRecordsDisplay=parseInt(f,10);d=0;for(e=c.length;d<e;d++)N(a,c[d]);a.aiDisplay=a.aiDisplayMaster.slice();a.bAjaxDataGet=!1;O(a);a._bInitComplete||
ta(a,b);a.bAjaxDataGet=!0;C(a,!1)}function sa(a,b){var c=h.isPlainObject(a.ajax)&&a.ajax.dataSrc!==k?a.ajax.dataSrc:a.sAjaxDataProp;return"data"===c?b.aaData||b[c]:""!==c?Q(c)(b):b}function pb(a){var b=a.oClasses,c=a.sTableId,d=a.oLanguage,e=a.oPreviousSearch,f=a.aanFeatures,g='<input type="search" class="'+b.sFilterInput+'"/>',j=d.sSearch,j=j.match(/_INPUT_/)?j.replace("_INPUT_",g):j+g,b=h("<div/>",{id:!f.f?c+"_filter":null,"class":b.sFilter}).append(h("<label/>").append(j)),f=function(){var b=!this.value?
"":this.value;b!=e.sSearch&&(fa(a,{sSearch:b,bRegex:e.bRegex,bSmart:e.bSmart,bCaseInsensitive:e.bCaseInsensitive}),a._iDisplayStart=0,O(a))},g=null!==a.searchDelay?a.searchDelay:"ssp"===y(a)?400:0,i=h("input",b).val(e.sSearch).attr("placeholder",d.sSearchPlaceholder).bind("keyup.DT search.DT input.DT paste.DT cut.DT",g?Oa(f,g):f).bind("keypress.DT",function(a){if(13==a.keyCode)return!1}).attr("aria-controls",c);h(a.nTable).on("search.dt.DT",function(b,c){if(a===c)try{i[0]!==I.activeElement&&i.val(e.sSearch)}catch(d){}});
return b[0]}function fa(a,b,c){var d=a.oPreviousSearch,e=a.aoPreSearchCols,f=function(a){d.sSearch=a.sSearch;d.bRegex=a.bRegex;d.bSmart=a.bSmart;d.bCaseInsensitive=a.bCaseInsensitive};Ga(a);if("ssp"!=y(a)){wb(a,b.sSearch,c,b.bEscapeRegex!==k?!b.bEscapeRegex:b.bRegex,b.bSmart,b.bCaseInsensitive);f(b);for(b=0;b<e.length;b++)xb(a,e[b].sSearch,b,e[b].bEscapeRegex!==k?!e[b].bEscapeRegex:e[b].bRegex,e[b].bSmart,e[b].bCaseInsensitive);yb(a)}else f(b);a.bFiltered=!0;u(a,null,"search",[a])}function yb(a){for(var b=
m.ext.search,c=a.aiDisplay,d,e,f=0,g=b.length;f<g;f++){for(var j=[],i=0,n=c.length;i<n;i++)e=c[i],d=a.aoData[e],b[f](a,d._aFilterData,e,d._aData,i)&&j.push(e);c.length=0;h.merge(c,j)}}function xb(a,b,c,d,e,f){if(""!==b)for(var g=a.aiDisplay,d=Pa(b,d,e,f),e=g.length-1;0<=e;e--)b=a.aoData[g[e]]._aFilterData[c],d.test(b)||g.splice(e,1)}function wb(a,b,c,d,e,f){var d=Pa(b,d,e,f),e=a.oPreviousSearch.sSearch,f=a.aiDisplayMaster,g;0!==m.ext.search.length&&(c=!0);g=zb(a);if(0>=b.length)a.aiDisplay=f.slice();
else{if(g||c||e.length>b.length||0!==b.indexOf(e)||a.bSorted)a.aiDisplay=f.slice();b=a.aiDisplay;for(c=b.length-1;0<=c;c--)d.test(a.aoData[b[c]]._sFilterRow)||b.splice(c,1)}}function Pa(a,b,c,d){a=b?a:Qa(a);c&&(a="^(?=.*?"+h.map(a.match(/"[^"]+"|[^ ]+/g)||[""],function(a){if('"'===a.charAt(0))var b=a.match(/^"(.*)"$/),a=b?b[1]:a;return a.replace('"',"")}).join(")(?=.*?")+").*$");return RegExp(a,d?"i":"")}function zb(a){var b=a.aoColumns,c,d,e,f,g,j,i,h,l=m.ext.type.search;c=!1;d=0;for(f=a.aoData.length;d<
f;d++)if(h=a.aoData[d],!h._aFilterData){j=[];e=0;for(g=b.length;e<g;e++)c=b[e],c.bSearchable?(i=B(a,d,e,"filter"),l[c.sType]&&(i=l[c.sType](i)),null===i&&(i=""),"string"!==typeof i&&i.toString&&(i=i.toString())):i="",i.indexOf&&-1!==i.indexOf("&")&&(ua.innerHTML=i,i=Zb?ua.textContent:ua.innerText),i.replace&&(i=i.replace(/[\r\n]/g,"")),j.push(i);h._aFilterData=j;h._sFilterRow=j.join(" ");c=!0}return c}function Ab(a){return{search:a.sSearch,smart:a.bSmart,regex:a.bRegex,caseInsensitive:a.bCaseInsensitive}}
// --- DataTables (minified) internals: table information display and initialisation. ---
// Bb(searchObj): inverse of the camelCase conversion — public {search,smart,regex,caseInsensitive}
// back to internal Hungarian {sSearch,bSmart,bRegex,bCaseInsensitive}.
// sb(settings): creates the "info" feature div (id "<tableId>_info" for the first instance only),
// registers Cb as a draw callback once, and wires ARIA attributes (role=status, aria-live=polite,
// aria-describedby on the table).
// Cb(settings) [continues on next line]: updates the info text on each draw.
function Bb(a){return{sSearch:a.search,bSmart:a.smart,bRegex:a.regex,bCaseInsensitive:a.caseInsensitive}}function sb(a){var b=a.sTableId,c=a.aanFeatures.i,d=h("<div/>",{"class":a.oClasses.sInfo,id:!c?b+"_info":null});c||(a.aoDrawCallback.push({fn:Cb,sName:"information"}),d.attr("role","status").attr("aria-live","polite"),h(a.nTable).attr("aria-describedby",b+"_info"));return d[0]}function Cb(a){var b=a.aanFeatures.i;if(0!==b.length){var c=a.oLanguage,d=a._iDisplayStart+1,e=a.fnDisplayEnd(),f=a.fnRecordsTotal(),
// (Cb cont.) picks sInfo/sInfoEmpty, appends sInfoFiltered when the filtered count differs from the
// total, appends sInfoPostFix, expands macros via Db, lets a user fnInfoCallback rewrite the string,
// then writes it into every registered info node.
// Db(settings, str): substitutes the _START_/_END_/_MAX_/_TOTAL_/_PAGE_/_PAGES_ macros using the
// instance's fnFormatNumber; -1 === _iDisplayLength means "show all", collapsing pages to 1.
g=a.fnRecordsDisplay(),j=g?c.sInfo:c.sInfoEmpty;g!==f&&(j+=" "+c.sInfoFiltered);j+=c.sInfoPostFix;j=Db(a,j);c=c.fnInfoCallback;null!==c&&(j=c.call(a.oInstance,a,d,e,f,g,j));h(b).html(j)}}function Db(a,b){var c=a.fnFormatNumber,d=a._iDisplayStart+1,e=a._iDisplayLength,f=a.fnRecordsDisplay(),g=-1===e;return b.replace(/_START_/g,c.call(a,d)).replace(/_END_/g,c.call(a,a.fnDisplayEnd())).replace(/_MAX_/g,c.call(a,a.fnRecordsTotal())).replace(/_TOTAL_/g,c.call(a,f)).replace(/_PAGE_/g,c.call(a,g?1:Math.ceil(d/
// ga(settings): main draw-time initialisation. Waits (50? no — 200ms retry below) until bInitialised,
// builds header/footer, optionally computes auto widths (Fa), applies explicit column widths, fires
// "preInit", draws (T), then for client-side or deferred-load modes pulls data — "ajax" mode loads
// via ra() and adds each row with N() before completing via ta().
e))).replace(/_PAGES_/g,c.call(a,g?1:Math.ceil(f/e)))}function ga(a){var b,c,d=a.iInitDisplayStart,e=a.aoColumns,f;c=a.oFeatures;var g=a.bDeferLoading;if(a.bInitialised){nb(a);kb(a);ea(a,a.aoHeader);ea(a,a.aoFooter);C(a,!0);c.bAutoWidth&&Fa(a);b=0;for(c=e.length;b<c;b++)f=e[b],f.sWidth&&(f.nTh.style.width=x(f.sWidth));u(a,null,"preInit",[a]);T(a);e=y(a);if("ssp"!=e||g)"ajax"==e?ra(a,[],function(c){var f=sa(a,c);for(b=0;b<f.length;b++)N(a,f[b]);a.iInitDisplayStart=d;T(a);C(a,!1);ta(a,c)},a):(C(a,!1),
// (ga cont.) if not yet initialised, retries itself every 200ms via setTimeout.
// ta(settings, json): marks _bInitComplete, recalculates widths (Y) when data arrived, and fires the
// "plugin-init" and "init" events.
// Ra(settings, val): page-length change handler — parses the value, recomputes the display start (Sa)
// and fires the "length" event.
// ob(settings) [continues on next line]: builds the page-length <select>; aLengthMenu may be a flat
// array or a [values, labels] pair.
ta(a))}else setTimeout(function(){ga(a)},200)}function ta(a,b){a._bInitComplete=!0;(b||a.oInit.aaData)&&Y(a);u(a,null,"plugin-init",[a,b]);u(a,"aoInitComplete","init",[a,b])}function Ra(a,b){var c=parseInt(b,10);a._iDisplayLength=c;Sa(a);u(a,null,"length",[a,c])}function ob(a){for(var b=a.oClasses,c=a.sTableId,d=a.aLengthMenu,e=h.isArray(d[0]),f=e?d[0]:d,d=e?d[1]:d,e=h("<select/>",{name:c+"_length","aria-controls":c,"class":b.sLengthSelect}),g=0,j=f.length;g<j;g++)e[0][g]=new Option(d[g],f[g]);var i=
// --- DataTables (minified) internals: length menu and pagination. ---
// (ob cont.) wraps the <select> in the sLength div, substitutes it into the sLengthMenu language
// string, binds change.DT to redraw via Ra/O, and keeps multiple instances in sync through the
// custom "length.dt.DT" event.
// tb(settings): builds the pagination feature container. m.ext.pager[type] is either a legacy
// plug-in object (fnInit/fnUpdate) or, when it is a function, the modern button-list renderer
// driven from the draw callback registered below.
h("<div><label/></div>").addClass(b.sLength);a.aanFeatures.l||(i[0].id=c+"_length");i.children().append(a.oLanguage.sLengthMenu.replace("_MENU_",e[0].outerHTML));h("select",i).val(a._iDisplayLength).bind("change.DT",function(){Ra(a,h(this).val());O(a)});h(a.nTable).bind("length.dt.DT",function(b,c,d){a===c&&h("select",i).val(d)});return i[0]}function tb(a){var b=a.sPaginationType,c=m.ext.pager[b],d="function"===typeof c,e=function(a){O(a)},b=h("<div/>").addClass(a.oClasses.sPaging+b)[0],f=a.aanFeatures;
// (tb cont.) the draw callback computes current page / page count (-1 length => single page),
// asks the pager function c for the button layout, and renders it into every registered paging
// node via the "pageButton" renderer (Na).
// Ta(settings, action, redraw): paging action dispatcher. action may be an absolute page number or
// "first"/"previous"/"next"/"last"; anything else logs error 5 via L. Returns whether the display
// start actually changed, firing "page" (and optionally redrawing) when it did.
d||c.fnInit(a,b,e);f.p||(b.id=a.sTableId+"_paginate",a.aoDrawCallback.push({fn:function(a){if(d){var b=a._iDisplayStart,i=a._iDisplayLength,h=a.fnRecordsDisplay(),l=-1===i,b=l?0:Math.ceil(b/i),i=l?1:Math.ceil(h/i),h=c(b,i),k,l=0;for(k=f.p.length;l<k;l++)Na(a,"pageButton")(a,f.p[l],l,h,b,i)}else c.fnUpdate(a,e)},sName:"pagination"}));return b}function Ta(a,b,c){var d=a._iDisplayStart,e=a._iDisplayLength,f=a.fnRecordsDisplay();0===f||-1===e?d=0:"number"===typeof b?(d=b*e,d>f&&(d=0)):"first"==b?d=0:
// qb(settings): creates the "processing" indicator div (id "<tableId>_processing" for the first
// instance) and inserts it before the table.
// C(settings, show) [continues on next line]: toggles the processing indicator's display and fires
// the "processing" event.
"previous"==b?(d=0<=e?d-e:0,0>d&&(d=0)):"next"==b?d+e<f&&(d+=e):"last"==b?d=Math.floor((f-1)/e)*e:L(a,0,"Unknown paging action: "+b,5);b=a._iDisplayStart!==d;a._iDisplayStart=d;b&&(u(a,null,"page",[a]),c&&O(a));return b}function qb(a){return h("<div/>",{id:!a.aanFeatures.r?a.sTableId+"_processing":null,"class":a.oClasses.sProcessing}).html(a.oLanguage.sProcessing).insertBefore(a.nTable)[0]}function C(a,b){a.oFeatures.bProcessing&&h(a.aanFeatures.r).css("display",b?"block":"none");u(a,null,"processing",
// --- DataTables (minified) internals: scrolling DOM construction and scroll-draw sizing. ---
// (C cont., then) rb(settings): builds the scrolling wrapper when scrollX/scrollY is configured.
// The original table's thead (and tfoot) are moved into cloned shell tables inside fixed
// sScrollHead/sScrollFoot divs, with the real table inside the scrollable sScrollBody div, so the
// header stays visible while the body scrolls. A <caption> is kept with whichever shell matches
// its _captionSide.
[a,b])}function rb(a){var b=h(a.nTable);b.attr("role","grid");var c=a.oScroll;if(""===c.sX&&""===c.sY)return a.nTable;var d=c.sX,e=c.sY,f=a.oClasses,g=b.children("caption"),j=g.length?g[0]._captionSide:null,i=h(b[0].cloneNode(!1)),n=h(b[0].cloneNode(!1)),l=b.children("tfoot");l.length||(l=null);i=h("<div/>",{"class":f.sScrollWrapper}).append(h("<div/>",{"class":f.sScrollHead}).css({overflow:"hidden",position:"relative",border:0,width:d?!d?null:x(d):"100%"}).append(h("<div/>",{"class":f.sScrollHeadInner}).css({"box-sizing":"content-box",
// (rb cont.) assembles wrapper > [head, body, (foot)]; cloned shells lose their id to avoid
// duplicate DOM ids.
width:c.sXInner||"100%"}).append(i.removeAttr("id").css("margin-left",0).append("top"===j?g:null).append(b.children("thead"))))).append(h("<div/>",{"class":f.sScrollBody}).css({position:"relative",overflow:"auto",width:!d?null:x(d)}).append(b));l&&i.append(h("<div/>",{"class":f.sScrollFoot}).css({overflow:"hidden",border:0,width:d?!d?null:x(d):"100%"}).append(h("<div/>",{"class":f.sScrollFootInner}).append(n.removeAttr("id").css("margin-left",0).append("bottom"===j?g:null).append(b.children("tfoot")))));
// (rb cont.) when scrollX is on, horizontal scrolling of the body is mirrored onto the fixed header
// and footer; scrollY becomes height (or max-height with bCollapse). Registers ka as the
// per-draw "scrolling" callback.
// ka(settings) [spans the next several lines]: the heavy per-draw alignment routine — measures and
// copies column widths between the hidden header of the body table and the visible cloned header.
var b=i.children(),k=b[0],f=b[1],t=l?b[2]:null;if(d)h(f).on("scroll.DT",function(){var a=this.scrollLeft;k.scrollLeft=a;l&&(t.scrollLeft=a)});h(f).css(e&&c.bCollapse?"max-height":"height",e);a.nScrollHead=k;a.nScrollBody=f;a.nScrollFoot=t;a.aoDrawCallback.push({fn:ka,sName:"scrolling"});return i[0]}function ka(a){var b=a.oScroll,c=b.sX,d=b.sXInner,e=b.sY,b=b.iBarWidth,f=h(a.nScrollHead),g=f[0].style,j=f.children("div"),i=j[0].style,n=j.children("table"),j=a.nScrollBody,l=h(j),q=j.style,t=h(a.nScrollFoot).children("div"),
// (ka cont.) E() zeroes a cell's vertical padding/border/height so only content width is measured.
// If scrollbar visibility changed since last draw (scrollBarVis), widths are recomputed via Y(a)
// and this pass bails out through the early `if` branch.
m=t.children("table"),o=h(a.nTHead),F=h(a.nTable),p=F[0],r=p.style,u=a.nTFoot?h(a.nTFoot):null,Eb=a.oBrowser,Ua=Eb.bScrollOversize,s=G(a.aoColumns,"nTh"),P,v,w,y,z=[],A=[],B=[],C=[],D,E=function(a){a=a.style;a.paddingTop="0";a.paddingBottom="0";a.borderTopWidth="0";a.borderBottomWidth="0";a.height=0};v=j.scrollHeight>j.clientHeight;if(a.scrollBarVis!==v&&a.scrollBarVis!==k)a.scrollBarVis=v,Y(a);else{a.scrollBarVis=v;F.children("thead, tfoot").remove();u&&(w=u.clone().prependTo(F),P=u.find("tr"),w=
// (ka cont.) clones thead/tfoot back into the body table (tabindex stripped), applies stored column
// widths, then measures. Ua (bScrollOversize) guards an old-IE quirk where the scrollbar consumes
// table width — compensated by subtracting iBarWidth.
w.find("tr"));y=o.clone().prependTo(F);o=o.find("tr");v=y.find("tr");y.find("th, td").removeAttr("tabindex");c||(q.width="100%",f[0].style.width="100%");h.each(qa(a,y),function(b,c){D=Z(a,b);c.style.width=a.aoColumns[D].sWidth});u&&J(function(a){a.style.width=""},w);f=F.outerWidth();if(""===c){r.width="100%";if(Ua&&(F.find("tbody").height()>j.offsetHeight||"scroll"==l.css("overflow-y")))r.width=x(F.outerWidth()-b);f=F.outerWidth()}else""!==d&&(r.width=x(d),f=F.outerWidth());J(E,v);J(function(a){B.push(a.innerHTML);
// (ka cont.) captures each measuring cell's innerHTML (B/C) and computed width (z/A), copies those
// widths onto the visible header (o) / footer (P) rows, then replaces the hidden cells' content with
// a zero-height "dataTables_sizing" div so the body table keeps the column widths without showing a
// duplicate header.
z.push(x(h(a).css("width")))},v);J(function(a,b){if(h.inArray(a,s)!==-1)a.style.width=z[b]},o);h(v).height(0);u&&(J(E,w),J(function(a){C.push(a.innerHTML);A.push(x(h(a).css("width")))},w),J(function(a,b){a.style.width=A[b]},P),h(w).height(0));J(function(a,b){a.innerHTML='<div class="dataTables_sizing" style="height:0;overflow:hidden;">'+B[b]+"</div>";a.style.width=z[b]},v);u&&J(function(a,b){a.innerHTML='<div class="dataTables_sizing" style="height:0;overflow:hidden;">'+C[b]+"</div>";a.style.width=
// (ka cont.) if the table is narrower than the container, pads out to container width (plus scrollbar
// width when a vertical scrollbar is present); logs warning 6 ("Possible column misalignment") when
// an explicit scrollX/sXInner makes a clean fit impossible.
A[b]},w);if(F.outerWidth()<f){P=j.scrollHeight>j.offsetHeight||"scroll"==l.css("overflow-y")?f+b:f;if(Ua&&(j.scrollHeight>j.offsetHeight||"scroll"==l.css("overflow-y")))r.width=x(P-b);(""===c||""!==d)&&L(a,1,"Possible column misalignment",6)}else P="100%";q.width=x(P);g.width=x(P);u&&(a.nScrollFoot.style.width=x(P));!e&&Ua&&(q.height=x(p.offsetHeight+b));c=F.outerWidth();n[0].style.width=x(c);i.width=x(c);d=F.height()>j.clientHeight||"scroll"==l.css("overflow-y");e="padding"+(Eb.bScrollbarLeft?"Left":
// (ka cont.) reserves scrollbar-width padding on the head/foot inner divs (left or right per
// browser), re-fires the body's scroll handler to re-sync positions, and resets scrollTop to 0 after
// a sort/filter (unless a draw-hold is active).
// J(fn, rows[, rows2]): iterates the element children of each row in lockstep across two row lists,
// calling fn(cell[, cell2], columnCounter) — the cell-pair workhorse used throughout ka.
// Fa(settings) [continues on next line]: automatic column width calculation.
"Right");i[e]=d?b+"px":"0px";u&&(m[0].style.width=x(c),t[0].style.width=x(c),t[0].style[e]=d?b+"px":"0px");F.children("colgroup").insertBefore(F.children("thead"));l.scroll();if((a.bSorted||a.bFiltered)&&!a._drawHold)j.scrollTop=0}}function J(a,b,c){for(var d=0,e=0,f=b.length,g,j;e<f;){g=b[e].firstChild;for(j=c?c[e].firstChild:null;g;)1===g.nodeType&&(c?a(g,j,d):a(g,d),d++),g=g.nextSibling,j=c?j.nextSibling:null;e++}}function Fa(a){var b=a.nTable,c=a.aoColumns,d=a.oScroll,e=d.sY,f=d.sX,g=d.sXInner,
// --- DataTables (minified) internals: automatic column width calculation and helpers. ---
// (Fa cont.) resolves user-specified widths via Fb; when nothing forces a measurement (no user
// widths, no scrolling, header cell count matches column count) it simply reads the live header
// widths. Otherwise it builds an off-screen clone of the table for measurement.
j=c.length,i=la(a,"bVisible"),n=h("th",a.nTHead),l=b.getAttribute("width"),k=b.parentNode,t=!1,m,o,p=a.oBrowser,d=p.bScrollOversize;(m=b.style.width)&&-1!==m.indexOf("%")&&(l=m);for(m=0;m<i.length;m++)o=c[i[m]],null!==o.sWidth&&(o.sWidth=Fb(o.sWidthOrig,k),t=!0);if(d||!t&&!f&&!e&&j==aa(a)&&j==n.length)for(m=0;m<j;m++)i=Z(a,m),null!==i&&(c[i].sWidth=x(n.eq(m).width()));else{j=h(b).clone().css("visibility","hidden").removeAttr("id");j.find("tbody tr").remove();var r=h("<tr/>").appendTo(j.find("tbody"));
// (Fa cont.) the clone gets a fresh thead/tfoot, user sWidthOrig applied to its header cells (as an
// inner <div> when scrollX is on), and one synthetic row holding, per column, a clone of the cell
// with the longest rendered content (Gb) plus any sContentPadding.
j.find("thead, tfoot").remove();j.append(h(a.nTHead).clone()).append(h(a.nTFoot).clone());j.find("tfoot th, tfoot td").css("width","");n=qa(a,j.find("thead")[0]);for(m=0;m<i.length;m++)o=c[i[m]],n[m].style.width=null!==o.sWidthOrig&&""!==o.sWidthOrig?x(o.sWidthOrig):"",o.sWidthOrig&&f&&h(n[m]).append(h("<div/>").css({width:o.sWidthOrig,margin:0,padding:0,border:0,height:1}));if(a.aoData.length)for(m=0;m<i.length;m++)t=i[m],o=c[t],h(Gb(a,t)).clone(!1).append(o.sContentPadding).appendTo(r);h("[name]",
// (Fa cont.) [name] attributes are stripped so the clone can't hijack form fields. The clone is
// inserted in a hidden absolute container, sized per the scroll configuration, measured cell by
// cell (getBoundingClientRect when bBounding, border/padding subtracted), and the results stored
// back as each column's sWidth; finally the throwaway container is removed.
j).removeAttr("name");o=h("<div/>").css(f||e?{position:"absolute",top:0,left:0,height:1,right:0,overflow:"hidden"}:{}).append(j).appendTo(k);f&&g?j.width(g):f?(j.css("width","auto"),j.removeAttr("width"),j.width()<k.clientWidth&&l&&j.width(k.clientWidth)):e?j.width(k.clientWidth):l&&j.width(l);for(m=e=0;m<i.length;m++)k=h(n[m]),g=k.outerWidth()-k.width(),k=p.bBounding?Math.ceil(n[m].getBoundingClientRect().width):k.outerWidth(),e+=k,c[i[m]].sWidth=x(k-g);b.style.width=x(e);o.remove()}l&&(b.style.width=
// (Fa cont.) when the table had a width attribute or scrollX is on, attaches a throttled (Oa)
// window-resize listener (namespaced "resize.DT-<instance>") that recalculates via Y; old-IE
// (bScrollOversize) defers attaching it by 1s.
// Fb(widthStr, parent): measures an arbitrary CSS width string by rendering a temp div; 0 for falsy.
// Gb(settings, col): returns the cell node with the longest content in the column — a detached
// <td> rendered from data when the row has no DOM node yet.
// Hb(settings, col) [continues on next line]: index of the row whose rendered "display" string is
// longest ($b is presumably a strip-HTML regex defined elsewhere — confirm).
x(l));if((l||f)&&!a._reszEvt)b=function(){h(D).bind("resize.DT-"+a.sInstance,Oa(function(){Y(a)}))},d?setTimeout(b,1E3):b(),a._reszEvt=!0}function Fb(a,b){if(!a)return 0;var c=h("<div/>").css("width",x(a)).appendTo(b||I.body),d=c[0].offsetWidth;c.remove();return d}function Gb(a,b){var c=Hb(a,b);if(0>c)return null;var d=a.aoData[c];return!d.nTr?h("<td/>").html(B(a,c,b,"display"))[0]:d.anCells[b]}function Hb(a,b){for(var c,d=-1,e=-1,f=0,g=a.aoData.length;f<g;f++)c=B(a,f,b,"display")+"",c=c.replace($b,
// x(v): CSS size formatter — null => "0px", numbers => px (negative clamped to "0px"), bare-digit
// strings get "px" appended, anything else passed through.
// V(settings): flattens aaSorting plus aaSortingFixed (array or {pre,post} object) into a single
// ordered list, expanding each column through its aDataSort multi-column list [continues below].
""),c=c.replace(/ /g," "),c.length>d&&(d=c.length,e=f);return e}function x(a){return null===a?"0px":"number"==typeof a?0>a?"0px":a+"px":a.match(/\d$/)?a+"px":a}function V(a){var b,c,d=[],e=a.aoColumns,f,g,j,i;b=a.aaSortingFixed;c=h.isPlainObject(b);var n=[];f=function(a){a.length&&!h.isArray(a[0])?n.push(a):h.merge(n,a)};h.isArray(b)&&f(b);c&&b.pre&&f(b.pre);f(a.aaSorting);c&&b.post&&f(b.post);for(a=0;a<n.length;a++){i=n[a][0];f=e[i].aDataSort;b=0;for(c=f.length;b<c;b++)g=f[b],j=e[g].sType||
// --- DataTables (minified) internals: sorting. ---
// (V cont.) each flattened entry records {src: user column, col: actual data column, dir,
// index into asSorting, type, formatter: m.ext.type.order[type+"-pre"]}; _idx is resolved lazily.
// mb(settings): performs the client-side sort of aiDisplayMaster. Builds per-row sort data via
// Ib, then — when every column has a "-pre" formatter — sorts with plain </> comparison, falling
// back to the registered m.ext.type.order comparators (or "string-") otherwise. The d[] map of
// original positions is the tie-breaker, making the sort stable. Skipped entirely for "ssp".
"string",n[a]._idx===k&&(n[a]._idx=h.inArray(n[a][1],e[g].asSorting)),d.push({src:i,col:g,dir:n[a][1],index:n[a]._idx,type:j,formatter:m.ext.type.order[j+"-pre"]})}return d}function mb(a){var b,c,d=[],e=m.ext.type.order,f=a.aoData,g=0,j,i=a.aiDisplayMaster,h;Ga(a);h=V(a);b=0;for(c=h.length;b<c;b++)j=h[b],j.formatter&&g++,Ib(a,j.col);if("ssp"!=y(a)&&0!==h.length){b=0;for(c=i.length;b<c;b++)d[i[b]]=b;g===h.length?i.sort(function(a,b){var c,e,g,j,i=h.length,k=f[a]._aSortData,m=f[b]._aSortData;for(g=
// (mb cont.) both comparator branches walk the sort columns in order, returning on the first
// non-zero comparison; sets settings.bSorted when done.
// Jb(settings): accessibility — sets aria-sort ("ascending"/"descending") on the currently sorted
// header, removes it elsewhere, and builds each sortable header's aria-label from its stripped
// sTitle plus the language's sSortAscending/sSortDescending hint for the NEXT toggle state.
0;g<i;g++)if(j=h[g],c=k[j.col],e=m[j.col],c=c<e?-1:c>e?1:0,0!==c)return"asc"===j.dir?c:-c;c=d[a];e=d[b];return c<e?-1:c>e?1:0}):i.sort(function(a,b){var c,g,j,i,k=h.length,m=f[a]._aSortData,p=f[b]._aSortData;for(j=0;j<k;j++)if(i=h[j],c=m[i.col],g=p[i.col],i=e[i.type+"-"+i.dir]||e["string-"+i.dir],c=i(c,g),0!==c)return c;c=d[a];g=d[b];return c<g?-1:c>g?1:0})}a.bSorted=!0}function Jb(a){for(var b,c,d=a.aoColumns,e=V(a),a=a.oLanguage.oAria,f=0,g=d.length;f<g;f++){c=d[f];var j=c.asSorting;b=c.sTitle.replace(/<.*?>/g,
// Va(settings, colIdx, shiftMulti, callback): mutates aaSorting in response to a header click.
// g() cycles a column to its next asSorting direction (null = drop from the sort when cycling past
// the end in multi-sort). Shift-click with bSortMulti adds/advances/removes the column within the
// existing sort; a plain click replaces the whole sort with this column. Ends with a draw (T) and
// the optional callback.
"");var i=c.nTh;i.removeAttribute("aria-sort");c.bSortable&&(0<e.length&&e[0].col==f?(i.setAttribute("aria-sort","asc"==e[0].dir?"ascending":"descending"),c=j[e[0].index+1]||j[0]):c=j[0],b+="asc"===c?a.sSortAscending:a.sSortDescending);i.setAttribute("aria-label",b)}}function Va(a,b,c,d){var e=a.aaSorting,f=a.aoColumns[b].asSorting,g=function(a,b){var c=a._idx;c===k&&(c=h.inArray(a[1],f));return c+1<f.length?c+1:b?null:0};"number"===typeof e[0]&&(e=a.aaSorting=[e]);c&&a.oFeatures.bSortMulti?(c=h.inArray(b,
// (Va cont.)
// Ma(settings, headerNode, colIdx, callback): attaches the sort listener (via Wa) to a header
// cell; when bProcessing is on it shows the indicator first and defers the actual Va() call with
// setTimeout(..., 0) so the indicator can paint before the (possibly long) client-side sort.
G(e,"0")),-1!==c?(b=g(e[c],!0),null===b&&1===e.length&&(b=0),null===b?e.splice(c,1):(e[c][1]=f[b],e[c]._idx=b)):(e.push([b,f[0],0]),e[e.length-1]._idx=0)):e.length&&e[0][0]==b?(b=g(e[0]),e.length=1,e[0][1]=f[b],e[0]._idx=b):(e.length=0,e.push([b,f[0]]),e[0]._idx=0);T(a);"function"==typeof d&&d(a)}function Ma(a,b,c,d){var e=a.aoColumns[c];Wa(b,{},function(b){!1!==e.bSortable&&(a.oFeatures.bProcessing?(C(a,!0),setTimeout(function(){Va(a,c,b.shiftKey,d);"ssp"!==y(a)&&C(a,!1)},0)):Va(a,c,b.shiftKey,d))})}
function va(a){var b=a.aLastSort,c=a.oClasses.sSortColumn,d=V(a),e=a.oFeatures,f,g;if(e.bSort&&e.bSortClasses){e=0;for(f=b.length;e<f;e++)g=b[e].src,h(G(a.aoData,"anCells",g)).removeClass(c+(2>e?e+1:3));e=0;for(f=d.length;e<f;e++)g=d[e].src,h(G(a.aoData,"anCells",g)).addClass(c+(2>e?e+1:3))}a.aLastSort=d}function Ib(a,b){var c=a.aoColumns[b],d=m.ext.order[c.sSortDataType],e;d&&(e=d.call(a.oInstance,a,b,$(a,b)));for(var f,g=m.ext.type.order[c.sType+"-pre"],j=0,i=a.aoData.length;j<i;j++)if(c=a.aoData[j],
c._aSortData||(c._aSortData=[]),!c._aSortData[b]||d)f=d?e[j]:B(a,j,b,"sort"),c._aSortData[b]=g?g(f):f}function wa(a){if(a.oFeatures.bStateSave&&!a.bDestroying){var b={time:+new Date,start:a._iDisplayStart,length:a._iDisplayLength,order:h.extend(!0,[],a.aaSorting),search:Ab(a.oPreviousSearch),columns:h.map(a.aoColumns,function(b,d){return{visible:b.bVisible,search:Ab(a.aoPreSearchCols[d])}})};u(a,"aoStateSaveParams","stateSaveParams",[a,b]);a.oSavedState=b;a.fnStateSaveCallback.call(a.oInstance,a,
// --- Minified DataTables build artifact: state persistence and misc helpers. ---
// NOTE(review): generated output — edit the unminified source, not this file.
// Contents (mid-line boundaries): tail of wa() (state save); Kb(), which restores a
// saved state from fnStateLoadCallback (start/length/order/search/column visibility,
// honouring iStateDuration expiry and a column-count match); xa(), node -> settings
// lookup; L(), the "DataTables warning" reporter (alert / throw / callback depending on
// ext.errMode); E() and Lb(), option-object copy and deep-extend; Wa(), which binds
// click + Enter-keypress + selectstart handlers under the ".DT" namespace; z()/u(),
// plug-in callback registration and firing (u also triggers the namespaced ".dt" event);
// Sa(), which clamps _iDisplayStart to a valid page; Na(), renderer resolution;
// y(), data-source mode ("ssp" / "ajax" / "dom"); ya(), pagination button-list builder
// (inserts "ellipsis" markers); db(), registration of numeric sort/search type variants;
// Nb(), wrapper exposing m.ext.internal functions on the legacy instance API.
b)}}function Kb(a){var b,c,d=a.aoColumns;if(a.oFeatures.bStateSave){var e=a.fnStateLoadCallback.call(a.oInstance,a);if(e&&e.time&&(b=u(a,"aoStateLoadParams","stateLoadParams",[a,e]),-1===h.inArray(!1,b)&&(b=a.iStateDuration,!(0<b&&e.time<+new Date-1E3*b)&&d.length===e.columns.length))){a.oLoadedState=h.extend(!0,{},e);e.start!==k&&(a._iDisplayStart=e.start,a.iInitDisplayStart=e.start);e.length!==k&&(a._iDisplayLength=e.length);e.order!==k&&(a.aaSorting=[],h.each(e.order,function(b,c){a.aaSorting.push(c[0]>=
d.length?[0,c[1]]:c)}));e.search!==k&&h.extend(a.oPreviousSearch,Bb(e.search));b=0;for(c=e.columns.length;b<c;b++){var f=e.columns[b];f.visible!==k&&(d[b].bVisible=f.visible);f.search!==k&&h.extend(a.aoPreSearchCols[b],Bb(f.search))}u(a,"aoStateLoaded","stateLoaded",[a,e])}}}function xa(a){var b=m.settings,a=h.inArray(a,G(b,"nTable"));return-1!==a?b[a]:null}function L(a,b,c,d){c="DataTables warning: "+(a?"table id="+a.sTableId+" - ":"")+c;d&&(c+=". For more information about this error, please see http://datatables.net/tn/"+
d);if(b)D.console&&console.log&&console.log(c);else if(b=m.ext,b=b.sErrMode||b.errMode,a&&u(a,null,"error",[a,d,c]),"alert"==b)alert(c);else{if("throw"==b)throw Error(c);"function"==typeof b&&b(a,d,c)}}function E(a,b,c,d){h.isArray(c)?h.each(c,function(c,d){h.isArray(d)?E(a,b,d[0],d[1]):E(a,b,d)}):(d===k&&(d=c),b[c]!==k&&(a[d]=b[c]))}function Lb(a,b,c){var d,e;for(e in b)b.hasOwnProperty(e)&&(d=b[e],h.isPlainObject(d)?(h.isPlainObject(a[e])||(a[e]={}),h.extend(!0,a[e],d)):a[e]=c&&"data"!==e&&"aaData"!==
e&&h.isArray(d)?d.slice():d);return a}function Wa(a,b,c){h(a).bind("click.DT",b,function(b){a.blur();c(b)}).bind("keypress.DT",b,function(a){13===a.which&&(a.preventDefault(),c(a))}).bind("selectstart.DT",function(){return!1})}function z(a,b,c,d){c&&a[b].push({fn:c,sName:d})}function u(a,b,c,d){var e=[];b&&(e=h.map(a[b].slice().reverse(),function(b){return b.fn.apply(a.oInstance,d)}));null!==c&&(b=h.Event(c+".dt"),h(a.nTable).trigger(b,d),e.push(b.result));return e}function Sa(a){var b=a._iDisplayStart,
c=a.fnDisplayEnd(),d=a._iDisplayLength;b>=c&&(b=c-d);b-=b%d;if(-1===d||0>b)b=0;a._iDisplayStart=b}function Na(a,b){var c=a.renderer,d=m.ext.renderer[b];return h.isPlainObject(c)&&c[b]?d[c[b]]||d._:"string"===typeof c?d[c]||d._:d._}function y(a){return a.oFeatures.bServerSide?"ssp":a.ajax||a.sAjaxSource?"ajax":"dom"}function ya(a,b){var c=[],c=Mb.numbers_length,d=Math.floor(c/2);b<=c?c=W(0,b):a<=d?(c=W(0,c-2),c.push("ellipsis"),c.push(b-1)):(a>=b-1-d?c=W(b-(c-2),b):(c=W(a-d+2,a+d-1),c.push("ellipsis"),
c.push(b-1)),c.splice(0,0,"ellipsis"),c.splice(0,0,0));c.DT_el="span";return c}function db(a){h.each({num:function(b){return za(b,a)},"num-fmt":function(b){return za(b,a,Xa)},"html-num":function(b){return za(b,a,Aa)},"html-num-fmt":function(b){return za(b,a,Aa,Xa)}},function(b,c){v.type.order[b+a+"-pre"]=c;b.match(/^html\-/)&&(v.type.search[b+a]=v.type.search.html)})}function Nb(a){return function(){var b=[xa(this[m.ext.iApiIndex])].concat(Array.prototype.slice.call(arguments));return m.ext.internal[a].apply(this,
// --- Minified DataTables build artifact: the DataTable constructor `m`. ---
// NOTE(review): generated output — edit the unminified source, not this file.
// First part exposes the legacy 1.9-style fn* instance API (fnAddData, fnDraw,
// fnFilter, fnGetData, fnGetNodes, fnGetPosition, fnOpen/fnClose, fnPageChange,
// fnSetColumnVis, fnSort, fnUpdate, ...) as thin wrappers over the new chainable Api.
// The per-element initialiser then: rejects non-<table> nodes, merges defaults,
// data-* attributes and user options into a fresh settings object, guards against
// double-initialisation ("Cannot reinitialise DataTable", honouring bRetrieve /
// bDestroy), assigns a generated id ("DataTables_Table_N") when missing, maps legacy
// option names onto current ones, registers user callbacks, configures jQueryUI
// classes/renderer when bJQueryUI is set, optionally fetches a remote language file
// (oLanguage.sUrl, async — completion deferred to its ajax handlers), reads column
// definitions from the thead (including data-sort/data-filter attribute sources),
// restores saved state, wires sort + state-save draw callbacks, ensures
// thead/tbody/tfoot exist, loads rows from aaData or the DOM, and finishes
// initialisation via ga() unless the async language load is still pending.
b)}}var m=function(a){this.$=function(a,b){return this.api(!0).$(a,b)};this._=function(a,b){return this.api(!0).rows(a,b).data()};this.api=function(a){return a?new r(xa(this[v.iApiIndex])):new r(this)};this.fnAddData=function(a,b){var c=this.api(!0),d=h.isArray(a)&&(h.isArray(a[0])||h.isPlainObject(a[0]))?c.rows.add(a):c.row.add(a);(b===k||b)&&c.draw();return d.flatten().toArray()};this.fnAdjustColumnSizing=function(a){var b=this.api(!0).columns.adjust(),c=b.settings()[0],d=c.oScroll;a===k||a?b.draw(!1):
(""!==d.sX||""!==d.sY)&&ka(c)};this.fnClearTable=function(a){var b=this.api(!0).clear();(a===k||a)&&b.draw()};this.fnClose=function(a){this.api(!0).row(a).child.hide()};this.fnDeleteRow=function(a,b,c){var d=this.api(!0),a=d.rows(a),e=a.settings()[0],h=e.aoData[a[0][0]];a.remove();b&&b.call(this,e,h);(c===k||c)&&d.draw();return h};this.fnDestroy=function(a){this.api(!0).destroy(a)};this.fnDraw=function(a){this.api(!0).draw(a)};this.fnFilter=function(a,b,c,d,e,h){e=this.api(!0);null===b||b===k?e.search(a,
c,d,h):e.column(b).search(a,c,d,h);e.draw()};this.fnGetData=function(a,b){var c=this.api(!0);if(a!==k){var d=a.nodeName?a.nodeName.toLowerCase():"";return b!==k||"td"==d||"th"==d?c.cell(a,b).data():c.row(a).data()||null}return c.data().toArray()};this.fnGetNodes=function(a){var b=this.api(!0);return a!==k?b.row(a).node():b.rows().nodes().flatten().toArray()};this.fnGetPosition=function(a){var b=this.api(!0),c=a.nodeName.toUpperCase();return"TR"==c?b.row(a).index():"TD"==c||"TH"==c?(a=b.cell(a).index(),
[a.row,a.columnVisible,a.column]):null};this.fnIsOpen=function(a){return this.api(!0).row(a).child.isShown()};this.fnOpen=function(a,b,c){return this.api(!0).row(a).child(b,c).show().child()[0]};this.fnPageChange=function(a,b){var c=this.api(!0).page(a);(b===k||b)&&c.draw(!1)};this.fnSetColumnVis=function(a,b,c){a=this.api(!0).column(a).visible(b);(c===k||c)&&a.columns.adjust().draw()};this.fnSettings=function(){return xa(this[v.iApiIndex])};this.fnSort=function(a){this.api(!0).order(a).draw()};this.fnSortListener=
function(a,b,c){this.api(!0).order.listener(a,b,c)};this.fnUpdate=function(a,b,c,d,e){var h=this.api(!0);c===k||null===c?h.row(b).data(a):h.cell(b,c).data(a);(e===k||e)&&h.columns.adjust();(d===k||d)&&h.draw();return 0};this.fnVersionCheck=v.fnVersionCheck;var b=this,c=a===k,d=this.length;c&&(a={});this.oApi=this.internal=v.internal;for(var e in m.ext.internal)e&&(this[e]=Nb(e));this.each(function(){var e={},e=1<d?Lb(e,a,!0):a,g=0,j,i=this.getAttribute("id"),n=!1,l=m.defaults,q=h(this);if("table"!=
this.nodeName.toLowerCase())L(null,0,"Non-table node initialisation ("+this.nodeName+")",2);else{eb(l);fb(l.column);K(l,l,!0);K(l.column,l.column,!0);K(l,h.extend(e,q.data()));var t=m.settings,g=0;for(j=t.length;g<j;g++){var p=t[g];if(p.nTable==this||p.nTHead.parentNode==this||p.nTFoot&&p.nTFoot.parentNode==this){g=e.bRetrieve!==k?e.bRetrieve:l.bRetrieve;if(c||g)return p.oInstance;if(e.bDestroy!==k?e.bDestroy:l.bDestroy){p.oInstance.fnDestroy();break}else{L(p,0,"Cannot reinitialise DataTable",3);
return}}if(p.sTableId==this.id){t.splice(g,1);break}}if(null===i||""===i)this.id=i="DataTables_Table_"+m.ext._unique++;var o=h.extend(!0,{},m.models.oSettings,{sDestroyWidth:q[0].style.width,sInstance:i,sTableId:i});o.nTable=this;o.oApi=b.internal;o.oInit=e;t.push(o);o.oInstance=1===b.length?b:q.dataTable();eb(e);e.oLanguage&&Da(e.oLanguage);e.aLengthMenu&&!e.iDisplayLength&&(e.iDisplayLength=h.isArray(e.aLengthMenu[0])?e.aLengthMenu[0][0]:e.aLengthMenu[0]);e=Lb(h.extend(!0,{},l),e);E(o.oFeatures,
e,"bPaginate bLengthChange bFilter bSort bSortMulti bInfo bProcessing bAutoWidth bSortClasses bServerSide bDeferRender".split(" "));E(o,e,["asStripeClasses","ajax","fnServerData","fnFormatNumber","sServerMethod","aaSorting","aaSortingFixed","aLengthMenu","sPaginationType","sAjaxSource","sAjaxDataProp","iStateDuration","sDom","bSortCellsTop","iTabIndex","fnStateLoadCallback","fnStateSaveCallback","renderer","searchDelay","rowId",["iCookieDuration","iStateDuration"],["oSearch","oPreviousSearch"],["aoSearchCols",
"aoPreSearchCols"],["iDisplayLength","_iDisplayLength"],["bJQueryUI","bJUI"]]);E(o.oScroll,e,[["sScrollX","sX"],["sScrollXInner","sXInner"],["sScrollY","sY"],["bScrollCollapse","bCollapse"]]);E(o.oLanguage,e,"fnInfoCallback");z(o,"aoDrawCallback",e.fnDrawCallback,"user");z(o,"aoServerParams",e.fnServerParams,"user");z(o,"aoStateSaveParams",e.fnStateSaveParams,"user");z(o,"aoStateLoadParams",e.fnStateLoadParams,"user");z(o,"aoStateLoaded",e.fnStateLoaded,"user");z(o,"aoRowCallback",e.fnRowCallback,
"user");z(o,"aoRowCreatedCallback",e.fnCreatedRow,"user");z(o,"aoHeaderCallback",e.fnHeaderCallback,"user");z(o,"aoFooterCallback",e.fnFooterCallback,"user");z(o,"aoInitComplete",e.fnInitComplete,"user");z(o,"aoPreDrawCallback",e.fnPreDrawCallback,"user");o.rowIdFn=Q(e.rowId);gb(o);i=o.oClasses;e.bJQueryUI?(h.extend(i,m.ext.oJUIClasses,e.oClasses),e.sDom===l.sDom&&"lfrtip"===l.sDom&&(o.sDom='<"H"lfr>t<"F"ip>'),o.renderer)?h.isPlainObject(o.renderer)&&!o.renderer.header&&(o.renderer.header="jqueryui"):
o.renderer="jqueryui":h.extend(i,m.ext.classes,e.oClasses);q.addClass(i.sTable);o.iInitDisplayStart===k&&(o.iInitDisplayStart=e.iDisplayStart,o._iDisplayStart=e.iDisplayStart);null!==e.iDeferLoading&&(o.bDeferLoading=!0,g=h.isArray(e.iDeferLoading),o._iRecordsDisplay=g?e.iDeferLoading[0]:e.iDeferLoading,o._iRecordsTotal=g?e.iDeferLoading[1]:e.iDeferLoading);var r=o.oLanguage;h.extend(!0,r,e.oLanguage);""!==r.sUrl&&(h.ajax({dataType:"json",url:r.sUrl,success:function(a){Da(a);K(l.oLanguage,a);h.extend(true,
r,a);ga(o)},error:function(){ga(o)}}),n=!0);null===e.asStripeClasses&&(o.asStripeClasses=[i.sStripeOdd,i.sStripeEven]);var g=o.asStripeClasses,v=q.children("tbody").find("tr").eq(0);-1!==h.inArray(!0,h.map(g,function(a){return v.hasClass(a)}))&&(h("tbody tr",this).removeClass(g.join(" ")),o.asDestroyStripes=g.slice());t=[];g=this.getElementsByTagName("thead");0!==g.length&&(da(o.aoHeader,g[0]),t=qa(o));if(null===e.aoColumns){p=[];g=0;for(j=t.length;g<j;g++)p.push(null)}else p=e.aoColumns;g=0;for(j=
p.length;g<j;g++)Ea(o,t?t[g]:null);ib(o,e.aoColumnDefs,p,function(a,b){ja(o,a,b)});if(v.length){var s=function(a,b){return a.getAttribute("data-"+b)!==null?b:null};h(v[0]).children("th, td").each(function(a,b){var c=o.aoColumns[a];if(c.mData===a){var d=s(b,"sort")||s(b,"order"),e=s(b,"filter")||s(b,"search");if(d!==null||e!==null){c.mData={_:a+".display",sort:d!==null?a+".@data-"+d:k,type:d!==null?a+".@data-"+d:k,filter:e!==null?a+".@data-"+e:k};ja(o,a)}}})}var w=o.oFeatures;e.bStateSave&&(w.bStateSave=
!0,Kb(o,e),z(o,"aoDrawCallback",wa,"state_save"));if(e.aaSorting===k){t=o.aaSorting;g=0;for(j=t.length;g<j;g++)t[g][1]=o.aoColumns[g].asSorting[0]}va(o);w.bSort&&z(o,"aoDrawCallback",function(){if(o.bSorted){var a=V(o),b={};h.each(a,function(a,c){b[c.src]=c.dir});u(o,null,"order",[o,a,b]);Jb(o)}});z(o,"aoDrawCallback",function(){(o.bSorted||y(o)==="ssp"||w.bDeferRender)&&va(o)},"sc");g=q.children("caption").each(function(){this._captionSide=q.css("caption-side")});j=q.children("thead");0===j.length&&
(j=h("<thead/>").appendTo(this));o.nTHead=j[0];j=q.children("tbody");0===j.length&&(j=h("<tbody/>").appendTo(this));o.nTBody=j[0];j=q.children("tfoot");if(0===j.length&&0<g.length&&(""!==o.oScroll.sX||""!==o.oScroll.sY))j=h("<tfoot/>").appendTo(this);0===j.length||0===j.children().length?q.addClass(i.sNoFooter):0<j.length&&(o.nTFoot=j[0],da(o.aoFooter,o.nTFoot));if(e.aaData)for(g=0;g<e.aaData.length;g++)N(o,e.aaData[g]);else(o.bDeferLoading||"dom"==y(o))&&ma(o,h(o.nTBody).children("tr"));o.aiDisplay=
o.aiDisplayMaster.slice();o.bInitialised=!0;!1===n&&ga(o)}});b=null;return this},v,r,p,s,Ya={},Ob=/[\r\n]/g,Aa=/<.*?>/g,ac=/^[\w\+\-]/,bc=/[\w\+\-]$/,cc=RegExp("(\\/|\\.|\\*|\\+|\\?|\\||\\(|\\)|\\[|\\]|\\{|\\}|\\\\|\\$|\\^|\\-)","g"),Xa=/[',$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfk]/gi,M=function(a){return!a||!0===a||"-"===a?!0:!1},Pb=function(a){var b=parseInt(a,10);return!isNaN(b)&&isFinite(a)?b:null},Qb=function(a,b){Ya[b]||(Ya[b]=RegExp(Qa(b),"g"));return"string"===typeof a&&"."!==b?a.replace(/\./g,
// --- Minified DataTables build artifact: utility helpers and the Api class `r`. ---
// NOTE(review): generated output — edit the unminified source, not this file.
// Contents (mid-line boundaries): tail of the decimal-mark normaliser; Za/Rb, numeric
// and HTML-numeric detection; G/ha, property pluck helpers over aoData-style arrays;
// W, integer range builder; Sb, truthy compaction; pa, order-preserving unique;
// m.util (throttle, escapeRegex); the settings resolver dc (node / jQuery / selector /
// Api -> settings objects); the Api constructor `r` with its array-like prototype
// (any/each/eq/filter/flatten/indexOf/map/pluck/reduce/reverse/sort/unique/...) and the
// central iterator() that dispatches "table"/"rows"/"columns"/"cell" style callbacks;
// finally r.extend, r.register (aliased to `p`) which builds the dotted chainable API
// tree, and r.registerPlural (aliased to `s`) for paired plural/singular endpoints.
"").replace(Ya[b],"."):a},Za=function(a,b,c){var d="string"===typeof a;if(M(a))return!0;b&&d&&(a=Qb(a,b));c&&d&&(a=a.replace(Xa,""));return!isNaN(parseFloat(a))&&isFinite(a)},Rb=function(a,b,c){return M(a)?!0:!(M(a)||"string"===typeof a)?null:Za(a.replace(Aa,""),b,c)?!0:null},G=function(a,b,c){var d=[],e=0,f=a.length;if(c!==k)for(;e<f;e++)a[e]&&a[e][b]&&d.push(a[e][b][c]);else for(;e<f;e++)a[e]&&d.push(a[e][b]);return d},ha=function(a,b,c,d){var e=[],f=0,g=b.length;if(d!==k)for(;f<g;f++)a[b[f]][c]&&
e.push(a[b[f]][c][d]);else for(;f<g;f++)e.push(a[b[f]][c]);return e},W=function(a,b){var c=[],d;b===k?(b=0,d=a):(d=b,b=a);for(var e=b;e<d;e++)c.push(e);return c},Sb=function(a){for(var b=[],c=0,d=a.length;c<d;c++)a[c]&&b.push(a[c]);return b},pa=function(a){var b=[],c,d,e=a.length,f,g=0;d=0;a:for(;d<e;d++){c=a[d];for(f=0;f<g;f++)if(b[f]===c)continue a;b.push(c);g++}return b};m.util={throttle:function(a,b){var c=b!==k?b:200,d,e;return function(){var b=this,g=+new Date,h=arguments;d&&g<d+c?(clearTimeout(e),
e=setTimeout(function(){d=k;a.apply(b,h)},c)):(d=g,a.apply(b,h))}},escapeRegex:function(a){return a.replace(cc,"\\$1")}};var A=function(a,b,c){a[b]!==k&&(a[c]=a[b])},ba=/\[.*?\]$/,U=/\(\)$/,Qa=m.util.escapeRegex,ua=h("<div>")[0],Zb=ua.textContent!==k,$b=/<.*?>/g,Oa=m.util.throttle,Tb=[],w=Array.prototype,dc=function(a){var b,c,d=m.settings,e=h.map(d,function(a){return a.nTable});if(a){if(a.nTable&&a.oApi)return[a];if(a.nodeName&&"table"===a.nodeName.toLowerCase())return b=h.inArray(a,e),-1!==b?[d[b]]:
null;if(a&&"function"===typeof a.settings)return a.settings().toArray();"string"===typeof a?c=h(a):a instanceof h&&(c=a)}else return[];if(c)return c.map(function(){b=h.inArray(this,e);return-1!==b?d[b]:null}).toArray()};r=function(a,b){if(!(this instanceof r))return new r(a,b);var c=[],d=function(a){(a=dc(a))&&(c=c.concat(a))};if(h.isArray(a))for(var e=0,f=a.length;e<f;e++)d(a[e]);else d(a);this.context=pa(c);b&&h.merge(this,b);this.selector={rows:null,cols:null,opts:null};r.extend(this,this,Tb)};
m.Api=r;h.extend(r.prototype,{any:function(){return 0!==this.count()},concat:w.concat,context:[],count:function(){return this.flatten().length},each:function(a){for(var b=0,c=this.length;b<c;b++)a.call(this,this[b],b,this);return this},eq:function(a){var b=this.context;return b.length>a?new r(b[a],this[a]):null},filter:function(a){var b=[];if(w.filter)b=w.filter.call(this,a,this);else for(var c=0,d=this.length;c<d;c++)a.call(this,this[c],c,this)&&b.push(this[c]);return new r(this.context,b)},flatten:function(){var a=
[];return new r(this.context,a.concat.apply(a,this.toArray()))},join:w.join,indexOf:w.indexOf||function(a,b){for(var c=b||0,d=this.length;c<d;c++)if(this[c]===a)return c;return-1},iterator:function(a,b,c,d){var e=[],f,g,h,i,n,l=this.context,m,t,p=this.selector;"string"===typeof a&&(d=c,c=b,b=a,a=!1);g=0;for(h=l.length;g<h;g++){var o=new r(l[g]);if("table"===b)f=c.call(o,l[g],g),f!==k&&e.push(f);else if("columns"===b||"rows"===b)f=c.call(o,l[g],this[g],g),f!==k&&e.push(f);else if("column"===b||"column-rows"===
b||"row"===b||"cell"===b){t=this[g];"column-rows"===b&&(m=Ba(l[g],p.opts));i=0;for(n=t.length;i<n;i++)f=t[i],f="cell"===b?c.call(o,l[g],f.row,f.column,g,i):c.call(o,l[g],f,g,i,m),f!==k&&e.push(f)}}return e.length||d?(a=new r(l,a?e.concat.apply([],e):e),b=a.selector,b.rows=p.rows,b.cols=p.cols,b.opts=p.opts,a):this},lastIndexOf:w.lastIndexOf||function(a,b){return this.indexOf.apply(this.toArray.reverse(),arguments)},length:0,map:function(a){var b=[];if(w.map)b=w.map.call(this,a,this);else for(var c=
0,d=this.length;c<d;c++)b.push(a.call(this,this[c],c));return new r(this.context,b)},pluck:function(a){return this.map(function(b){return b[a]})},pop:w.pop,push:w.push,reduce:w.reduce||function(a,b){return hb(this,a,b,0,this.length,1)},reduceRight:w.reduceRight||function(a,b){return hb(this,a,b,this.length-1,-1,-1)},reverse:w.reverse,selector:null,shift:w.shift,sort:w.sort,splice:w.splice,toArray:function(){return w.slice.call(this)},to$:function(){return h(this)},toJQuery:function(){return h(this)},
unique:function(){return new r(this.context,pa(this))},unshift:w.unshift});r.extend=function(a,b,c){if(c.length&&b&&(b instanceof r||b.__dt_wrapper)){var d,e,f,g=function(a,b,c){return function(){var d=b.apply(a,arguments);r.extend(d,d,c.methodExt);return d}};d=0;for(e=c.length;d<e;d++)f=c[d],b[f.name]="function"===typeof f.val?g(a,f.val,f):h.isPlainObject(f.val)?{}:f.val,b[f.name].__dt_wrapper=!0,r.extend(a,b[f.name],f.propExt)}};r.register=p=function(a,b){if(h.isArray(a))for(var c=0,d=a.length;c<
d;c++)r.register(a[c],b);else for(var e=a.split("."),f=Tb,g,j,c=0,d=e.length;c<d;c++){g=(j=-1!==e[c].indexOf("()"))?e[c].replace("()",""):e[c];var i;a:{i=0;for(var n=f.length;i<n;i++)if(f[i].name===g){i=f[i];break a}i=null}i||(i={name:g,val:{},methodExt:[],propExt:[]},f.push(i));c===d-1?i.val=b:f=j?i.methodExt:i.propExt}};r.registerPlural=s=function(a,b,c){r.register(a,c);r.register(b,function(){var a=c.apply(this,arguments);return a===this?this:a instanceof r?a.length?h.isArray(a[0])?new r(a.context,
// --- Minified DataTables build artifact: chainable API endpoint registrations. ---
// NOTE(review): generated output — edit the unminified source, not this file.
// Registered via p (r.register) and s (r.registerPlural), in order: tables()/table()
// and their node accessors; draw() (full / "page" / "full-hold"); page(), page.info(),
// page.len(); the ajax.* group (json/params/reload/url/url().load()) with the shared
// reload helper Ub; the row-selector machinery ($a generic selector runner, ab option
// normaliser, bb first-match reducer, Ba display-set resolver) feeding rows()/row()
// and their nodes/data/cache/invalidate/indexes/ids/remove/add endpoints; row child
// ("details") rows — cb/Vb helpers plus child()/child.show()/hide()/remove()/isShown();
// columns()/column() selectors (by index, visIdx/visible, name, node, data-dt-column)
// with header/footer/data/dataSrc/cache/nodes/visible/indexes/adjust endpoints;
// cells()/cell() selection and data/cache/render/index/invalidate; order(),
// order.listener(), order.fixed(), columns().order(); search() table-wide and
// per-column; state()/state.clear()/state.loaded()/state.save(); m.versionCheck,
// m.isDataTable, m.tables; $() row-node filtering; on/one/off event proxies that
// force the ".dt" namespace; clear(), settings(), init(), data(); and the opening
// of destroy() (its body continues past this chunk).
a[0]):a[0]:k:a})};p("tables()",function(a){var b;if(a){b=r;var c=this.context;if("number"===typeof a)a=[c[a]];else var d=h.map(c,function(a){return a.nTable}),a=h(d).filter(a).map(function(){var a=h.inArray(this,d);return c[a]}).toArray();b=new b(a)}else b=this;return b});p("table()",function(a){var a=this.tables(a),b=a.context;return b.length?new r(b[0]):a});s("tables().nodes()","table().node()",function(){return this.iterator("table",function(a){return a.nTable},1)});s("tables().body()","table().body()",
function(){return this.iterator("table",function(a){return a.nTBody},1)});s("tables().header()","table().header()",function(){return this.iterator("table",function(a){return a.nTHead},1)});s("tables().footer()","table().footer()",function(){return this.iterator("table",function(a){return a.nTFoot},1)});s("tables().containers()","table().container()",function(){return this.iterator("table",function(a){return a.nTableWrapper},1)});p("draw()",function(a){return this.iterator("table",function(b){"page"===
a?O(b):("string"===typeof a&&(a="full-hold"===a?!1:!0),T(b,!1===a))})});p("page()",function(a){return a===k?this.page.info().page:this.iterator("table",function(b){Ta(b,a)})});p("page.info()",function(){if(0===this.context.length)return k;var a=this.context[0],b=a._iDisplayStart,c=a.oFeatures.bPaginate?a._iDisplayLength:-1,d=a.fnRecordsDisplay(),e=-1===c;return{page:e?0:Math.floor(b/c),pages:e?1:Math.ceil(d/c),start:b,end:a.fnDisplayEnd(),length:c,recordsTotal:a.fnRecordsTotal(),recordsDisplay:d,
serverSide:"ssp"===y(a)}});p("page.len()",function(a){return a===k?0!==this.context.length?this.context[0]._iDisplayLength:k:this.iterator("table",function(b){Ra(b,a)})});var Ub=function(a,b,c){if(c){var d=new r(a);d.one("draw",function(){c(d.ajax.json())})}if("ssp"==y(a))T(a,b);else{C(a,!0);var e=a.jqXHR;e&&4!==e.readyState&&e.abort();ra(a,[],function(c){na(a);for(var c=sa(a,c),d=0,e=c.length;d<e;d++)N(a,c[d]);T(a,b);C(a,!1)})}};p("ajax.json()",function(){var a=this.context;if(0<a.length)return a[0].json});
p("ajax.params()",function(){var a=this.context;if(0<a.length)return a[0].oAjaxData});p("ajax.reload()",function(a,b){return this.iterator("table",function(c){Ub(c,!1===b,a)})});p("ajax.url()",function(a){var b=this.context;if(a===k){if(0===b.length)return k;b=b[0];return b.ajax?h.isPlainObject(b.ajax)?b.ajax.url:b.ajax:b.sAjaxSource}return this.iterator("table",function(b){h.isPlainObject(b.ajax)?b.ajax.url=a:b.ajax=a})});p("ajax.url().load()",function(a,b){return this.iterator("table",function(c){Ub(c,
!1===b,a)})});var $a=function(a,b,c,d,e){var f=[],g,j,i,n,l,m;i=typeof b;if(!b||"string"===i||"function"===i||b.length===k)b=[b];i=0;for(n=b.length;i<n;i++){j=b[i]&&b[i].split?b[i].split(","):[b[i]];l=0;for(m=j.length;l<m;l++)(g=c("string"===typeof j[l]?h.trim(j[l]):j[l]))&&g.length&&(f=f.concat(g))}a=v.selector[a];if(a.length){i=0;for(n=a.length;i<n;i++)f=a[i](d,e,f)}return pa(f)},ab=function(a){a||(a={});a.filter&&a.search===k&&(a.search=a.filter);return h.extend({search:"none",order:"current",
page:"all"},a)},bb=function(a){for(var b=0,c=a.length;b<c;b++)if(0<a[b].length)return a[0]=a[b],a[0].length=1,a.length=1,a.context=[a.context[b]],a;a.length=0;return a},Ba=function(a,b){var c,d,e,f=[],g=a.aiDisplay;c=a.aiDisplayMaster;var j=b.search;d=b.order;e=b.page;if("ssp"==y(a))return"removed"===j?[]:W(0,c.length);if("current"==e){c=a._iDisplayStart;for(d=a.fnDisplayEnd();c<d;c++)f.push(g[c])}else if("current"==d||"applied"==d)f="none"==j?c.slice():"applied"==j?g.slice():h.map(c,function(a){return-1===
h.inArray(a,g)?a:null});else if("index"==d||"original"==d){c=0;for(d=a.aoData.length;c<d;c++)"none"==j?f.push(c):(e=h.inArray(c,g),(-1===e&&"removed"==j||0<=e&&"applied"==j)&&f.push(c))}return f};p("rows()",function(a,b){a===k?a="":h.isPlainObject(a)&&(b=a,a="");var b=ab(b),c=this.iterator("table",function(c){var e=b;return $a("row",a,function(a){var b=Pb(a);if(b!==null&&!e)return[b];var j=Ba(c,e);if(b!==null&&h.inArray(b,j)!==-1)return[b];if(!a)return j;if(typeof a==="function")return h.map(j,function(b){var e=
c.aoData[b];return a(b,e._aData,e.nTr)?b:null});b=Sb(ha(c.aoData,j,"nTr"));if(a.nodeName){if(a._DT_RowIndex!==k)return[a._DT_RowIndex];if(a._DT_CellIndex)return[a._DT_CellIndex.row];b=h(a).closest("*[data-dt-row]");return b.length?[b.data("dt-row")]:[]}if(typeof a==="string"&&a.charAt(0)==="#"){j=c.aIds[a.replace(/^#/,"")];if(j!==k)return[j.idx]}return h(b).filter(a).map(function(){return this._DT_RowIndex}).toArray()},c,e)},1);c.selector.rows=a;c.selector.opts=b;return c});p("rows().nodes()",function(){return this.iterator("row",
function(a,b){return a.aoData[b].nTr||k},1)});p("rows().data()",function(){return this.iterator(!0,"rows",function(a,b){return ha(a.aoData,b,"_aData")},1)});s("rows().cache()","row().cache()",function(a){return this.iterator("row",function(b,c){var d=b.aoData[c];return"search"===a?d._aFilterData:d._aSortData},1)});s("rows().invalidate()","row().invalidate()",function(a){return this.iterator("row",function(b,c){ca(b,c,a)})});s("rows().indexes()","row().index()",function(){return this.iterator("row",
function(a,b){return b},1)});s("rows().ids()","row().id()",function(a){for(var b=[],c=this.context,d=0,e=c.length;d<e;d++)for(var f=0,g=this[d].length;f<g;f++){var h=c[d].rowIdFn(c[d].aoData[this[d][f]]._aData);b.push((!0===a?"#":"")+h)}return new r(c,b)});s("rows().remove()","row().remove()",function(){var a=this;this.iterator("row",function(b,c,d){var e=b.aoData,f=e[c],g,h,i,n,l;e.splice(c,1);g=0;for(h=e.length;g<h;g++)if(i=e[g],l=i.anCells,null!==i.nTr&&(i.nTr._DT_RowIndex=g),null!==l){i=0;for(n=
l.length;i<n;i++)l[i]._DT_CellIndex.row=g}oa(b.aiDisplayMaster,c);oa(b.aiDisplay,c);oa(a[d],c,!1);Sa(b);c=b.rowIdFn(f._aData);c!==k&&delete b.aIds[c]});this.iterator("table",function(a){for(var c=0,d=a.aoData.length;c<d;c++)a.aoData[c].idx=c});return this});p("rows.add()",function(a){var b=this.iterator("table",function(b){var c,f,g,h=[];f=0;for(g=a.length;f<g;f++)c=a[f],c.nodeName&&"TR"===c.nodeName.toUpperCase()?h.push(ma(b,c)[0]):h.push(N(b,c));return h},1),c=this.rows(-1);c.pop();h.merge(c,b);
return c});p("row()",function(a,b){return bb(this.rows(a,b))});p("row().data()",function(a){var b=this.context;if(a===k)return b.length&&this.length?b[0].aoData[this[0]]._aData:k;b[0].aoData[this[0]]._aData=a;ca(b[0],this[0],"data");return this});p("row().node()",function(){var a=this.context;return a.length&&this.length?a[0].aoData[this[0]].nTr||null:null});p("row.add()",function(a){a instanceof h&&a.length&&(a=a[0]);var b=this.iterator("table",function(b){return a.nodeName&&"TR"===a.nodeName.toUpperCase()?
ma(b,a)[0]:N(b,a)});return this.row(b[0])});var cb=function(a,b){var c=a.context;if(c.length&&(c=c[0].aoData[b!==k?b:a[0]])&&c._details)c._details.remove(),c._detailsShow=k,c._details=k},Vb=function(a,b){var c=a.context;if(c.length&&a.length){var d=c[0].aoData[a[0]];if(d._details){(d._detailsShow=b)?d._details.insertAfter(d.nTr):d._details.detach();var e=c[0],f=new r(e),g=e.aoData;f.off("draw.dt.DT_details column-visibility.dt.DT_details destroy.dt.DT_details");0<G(g,"_details").length&&(f.on("draw.dt.DT_details",
function(a,b){e===b&&f.rows({page:"current"}).eq(0).each(function(a){a=g[a];a._detailsShow&&a._details.insertAfter(a.nTr)})}),f.on("column-visibility.dt.DT_details",function(a,b){if(e===b)for(var c,d=aa(b),f=0,h=g.length;f<h;f++)c=g[f],c._details&&c._details.children("td[colspan]").attr("colspan",d)}),f.on("destroy.dt.DT_details",function(a,b){if(e===b)for(var c=0,d=g.length;c<d;c++)g[c]._details&&cb(f,c)}))}}};p("row().child()",function(a,b){var c=this.context;if(a===k)return c.length&&this.length?
c[0].aoData[this[0]]._details:k;if(!0===a)this.child.show();else if(!1===a)cb(this);else if(c.length&&this.length){var d=c[0],c=c[0].aoData[this[0]],e=[],f=function(a,b){if(h.isArray(a)||a instanceof h)for(var c=0,k=a.length;c<k;c++)f(a[c],b);else a.nodeName&&"tr"===a.nodeName.toLowerCase()?e.push(a):(c=h("<tr><td/></tr>").addClass(b),h("td",c).addClass(b).html(a)[0].colSpan=aa(d),e.push(c[0]))};f(a,b);c._details&&c._details.remove();c._details=h(e);c._detailsShow&&c._details.insertAfter(c.nTr)}return this});
p(["row().child.show()","row().child().show()"],function(){Vb(this,!0);return this});p(["row().child.hide()","row().child().hide()"],function(){Vb(this,!1);return this});p(["row().child.remove()","row().child().remove()"],function(){cb(this);return this});p("row().child.isShown()",function(){var a=this.context;return a.length&&this.length?a[0].aoData[this[0]]._detailsShow||!1:!1});var ec=/^(.+):(name|visIdx|visible)$/,Wb=function(a,b,c,d,e){for(var c=[],d=0,f=e.length;d<f;d++)c.push(B(a,e[d],b));
return c};p("columns()",function(a,b){a===k?a="":h.isPlainObject(a)&&(b=a,a="");var b=ab(b),c=this.iterator("table",function(c){var e=a,f=b,g=c.aoColumns,j=G(g,"sName"),i=G(g,"nTh");return $a("column",e,function(a){var b=Pb(a);if(a==="")return W(g.length);if(b!==null)return[b>=0?b:g.length+b];if(typeof a==="function"){var e=Ba(c,f);return h.map(g,function(b,f){return a(f,Wb(c,f,0,0,e),i[f])?f:null})}var k=typeof a==="string"?a.match(ec):"";if(k)switch(k[2]){case "visIdx":case "visible":b=parseInt(k[1],
10);if(b<0){var m=h.map(g,function(a,b){return a.bVisible?b:null});return[m[m.length+b]]}return[Z(c,b)];case "name":return h.map(j,function(a,b){return a===k[1]?b:null});default:return[]}if(a.nodeName&&a._DT_CellIndex)return[a._DT_CellIndex.column];b=h(i).filter(a).map(function(){return h.inArray(this,i)}).toArray();if(b.length||!a.nodeName)return b;b=h(a).closest("*[data-dt-column]");return b.length?[b.data("dt-column")]:[]},c,f)},1);c.selector.cols=a;c.selector.opts=b;return c});s("columns().header()",
"column().header()",function(){return this.iterator("column",function(a,b){return a.aoColumns[b].nTh},1)});s("columns().footer()","column().footer()",function(){return this.iterator("column",function(a,b){return a.aoColumns[b].nTf},1)});s("columns().data()","column().data()",function(){return this.iterator("column-rows",Wb,1)});s("columns().dataSrc()","column().dataSrc()",function(){return this.iterator("column",function(a,b){return a.aoColumns[b].mData},1)});s("columns().cache()","column().cache()",
function(a){return this.iterator("column-rows",function(b,c,d,e,f){return ha(b.aoData,f,"search"===a?"_aFilterData":"_aSortData",c)},1)});s("columns().nodes()","column().nodes()",function(){return this.iterator("column-rows",function(a,b,c,d,e){return ha(a.aoData,e,"anCells",b)},1)});s("columns().visible()","column().visible()",function(a,b){var c=this.iterator("column",function(b,c){if(a===k)return b.aoColumns[c].bVisible;var f=b.aoColumns,g=f[c],j=b.aoData,i,n,l;if(a!==k&&g.bVisible!==a){if(a){var m=
h.inArray(!0,G(f,"bVisible"),c+1);i=0;for(n=j.length;i<n;i++)l=j[i].nTr,f=j[i].anCells,l&&l.insertBefore(f[c],f[m]||null)}else h(G(b.aoData,"anCells",c)).detach();g.bVisible=a;ea(b,b.aoHeader);ea(b,b.aoFooter);wa(b)}});a!==k&&(this.iterator("column",function(c,e){u(c,null,"column-visibility",[c,e,a,b])}),(b===k||b)&&this.columns.adjust());return c});s("columns().indexes()","column().index()",function(a){return this.iterator("column",function(b,c){return"visible"===a?$(b,c):c},1)});p("columns.adjust()",
function(){return this.iterator("table",function(a){Y(a)},1)});p("column.index()",function(a,b){if(0!==this.context.length){var c=this.context[0];if("fromVisible"===a||"toData"===a)return Z(c,b);if("fromData"===a||"toVisible"===a)return $(c,b)}});p("column()",function(a,b){return bb(this.columns(a,b))});p("cells()",function(a,b,c){h.isPlainObject(a)&&(a.row===k?(c=a,a=null):(c=b,b=null));h.isPlainObject(b)&&(c=b,b=null);if(null===b||b===k)return this.iterator("table",function(b){var d=a,e=ab(c),f=
b.aoData,g=Ba(b,e),j=Sb(ha(f,g,"anCells")),i=h([].concat.apply([],j)),l,n=b.aoColumns.length,m,p,r,u,v,s;return $a("cell",d,function(a){var c=typeof a==="function";if(a===null||a===k||c){m=[];p=0;for(r=g.length;p<r;p++){l=g[p];for(u=0;u<n;u++){v={row:l,column:u};if(c){s=f[l];a(v,B(b,l,u),s.anCells?s.anCells[u]:null)&&m.push(v)}else m.push(v)}}return m}if(h.isPlainObject(a))return[a];c=i.filter(a).map(function(a,b){return{row:b._DT_CellIndex.row,column:b._DT_CellIndex.column}}).toArray();if(c.length||
!a.nodeName)return c;s=h(a).closest("*[data-dt-row]");return s.length?[{row:s.data("dt-row"),column:s.data("dt-column")}]:[]},b,e)});var d=this.columns(b,c),e=this.rows(a,c),f,g,j,i,n,l=this.iterator("table",function(a,b){f=[];g=0;for(j=e[b].length;g<j;g++){i=0;for(n=d[b].length;i<n;i++)f.push({row:e[b][g],column:d[b][i]})}return f},1);h.extend(l.selector,{cols:b,rows:a,opts:c});return l});s("cells().nodes()","cell().node()",function(){return this.iterator("cell",function(a,b,c){return(a=a.aoData[b])&&
a.anCells?a.anCells[c]:k},1)});p("cells().data()",function(){return this.iterator("cell",function(a,b,c){return B(a,b,c)},1)});s("cells().cache()","cell().cache()",function(a){a="search"===a?"_aFilterData":"_aSortData";return this.iterator("cell",function(b,c,d){return b.aoData[c][a][d]},1)});s("cells().render()","cell().render()",function(a){return this.iterator("cell",function(b,c,d){return B(b,c,d,a)},1)});s("cells().indexes()","cell().index()",function(){return this.iterator("cell",function(a,
b,c){return{row:b,column:c,columnVisible:$(a,c)}},1)});s("cells().invalidate()","cell().invalidate()",function(a){return this.iterator("cell",function(b,c,d){ca(b,c,a,d)})});p("cell()",function(a,b,c){return bb(this.cells(a,b,c))});p("cell().data()",function(a){var b=this.context,c=this[0];if(a===k)return b.length&&c.length?B(b[0],c[0].row,c[0].column):k;jb(b[0],c[0].row,c[0].column,a);ca(b[0],c[0].row,"data",c[0].column);return this});p("order()",function(a,b){var c=this.context;if(a===k)return 0!==
c.length?c[0].aaSorting:k;"number"===typeof a?a=[[a,b]]:a.length&&!h.isArray(a[0])&&(a=Array.prototype.slice.call(arguments));return this.iterator("table",function(b){b.aaSorting=a.slice()})});p("order.listener()",function(a,b,c){return this.iterator("table",function(d){Ma(d,a,b,c)})});p("order.fixed()",function(a){if(!a){var b=this.context,b=b.length?b[0].aaSortingFixed:k;return h.isArray(b)?{pre:b}:b}return this.iterator("table",function(b){b.aaSortingFixed=h.extend(!0,{},a)})});p(["columns().order()",
"column().order()"],function(a){var b=this;return this.iterator("table",function(c,d){var e=[];h.each(b[d],function(b,c){e.push([c,a])});c.aaSorting=e})});p("search()",function(a,b,c,d){var e=this.context;return a===k?0!==e.length?e[0].oPreviousSearch.sSearch:k:this.iterator("table",function(e){e.oFeatures.bFilter&&fa(e,h.extend({},e.oPreviousSearch,{sSearch:a+"",bRegex:null===b?!1:b,bSmart:null===c?!0:c,bCaseInsensitive:null===d?!0:d}),1)})});s("columns().search()","column().search()",function(a,
b,c,d){return this.iterator("column",function(e,f){var g=e.aoPreSearchCols;if(a===k)return g[f].sSearch;e.oFeatures.bFilter&&(h.extend(g[f],{sSearch:a+"",bRegex:null===b?!1:b,bSmart:null===c?!0:c,bCaseInsensitive:null===d?!0:d}),fa(e,e.oPreviousSearch,1))})});p("state()",function(){return this.context.length?this.context[0].oSavedState:null});p("state.clear()",function(){return this.iterator("table",function(a){a.fnStateSaveCallback.call(a.oInstance,a,{})})});p("state.loaded()",function(){return this.context.length?
this.context[0].oLoadedState:null});p("state.save()",function(){return this.iterator("table",function(a){wa(a)})});m.versionCheck=m.fnVersionCheck=function(a){for(var b=m.version.split("."),a=a.split("."),c,d,e=0,f=a.length;e<f;e++)if(c=parseInt(b[e],10)||0,d=parseInt(a[e],10)||0,c!==d)return c>d;return!0};m.isDataTable=m.fnIsDataTable=function(a){var b=h(a).get(0),c=!1;h.each(m.settings,function(a,e){var f=e.nScrollHead?h("table",e.nScrollHead)[0]:null,g=e.nScrollFoot?h("table",e.nScrollFoot)[0]:
null;if(e.nTable===b||f===b||g===b)c=!0});return c};m.tables=m.fnTables=function(a){var b=!1;h.isPlainObject(a)&&(b=a.api,a=a.visible);var c=h.map(m.settings,function(b){if(!a||a&&h(b.nTable).is(":visible"))return b.nTable});return b?new r(c):c};m.camelToHungarian=K;p("$()",function(a,b){var c=this.rows(b).nodes(),c=h(c);return h([].concat(c.filter(a).toArray(),c.find(a).toArray()))});h.each(["on","one","off"],function(a,b){p(b+"()",function(){var a=Array.prototype.slice.call(arguments);a[0].match(/\.dt\b/)||
(a[0]+=".dt");var d=h(this.tables().nodes());d[b].apply(d,a);return this})});p("clear()",function(){return this.iterator("table",function(a){na(a)})});p("settings()",function(){return new r(this.context,this.context)});p("init()",function(){var a=this.context;return a.length?a[0].oInit:null});p("data()",function(){return this.iterator("table",function(a){return G(a.aoData,"_aData")}).flatten()});p("destroy()",function(a){a=a||!1;return this.iterator("table",function(b){var c=b.nTableWrapper.parentNode,
d=b.oClasses,e=b.nTable,f=b.nTBody,g=b.nTHead,j=b.nTFoot,i=h(e),f=h(f),k=h(b.nTableWrapper),l=h.map(b.aoData,function(a){return a.nTr}),p;b.bDestroying=!0;u(b,"aoDestroyCallback","destroy",[b]);a||(new r(b)).columns().visible(!0);k.unbind(".DT").find(":not(tbody *)").unbind(".DT");h(D).unbind(".DT-"+b.sInstance);e!=g.parentNode&&(i.children("thead").detach(),i.append(g));j&&e!=j.parentNode&&(i.children("tfoot").detach(),i.append(j));b.aaSorting=[];b.aaSortingFixed=[];va(b);h(l).removeClass(b.asStripeClasses.join(" "));
h("th, td",g).removeClass(d.sSortable+" "+d.sSortableAsc+" "+d.sSortableDesc+" "+d.sSortableNone);b.bJUI&&(h("th span."+d.sSortIcon+", td span."+d.sSortIcon,g).detach(),h("th, td",g).each(function(){var a=h("div."+d.sSortJUIWrapper,this);h(this).append(a.contents());a.detach()}));f.children().detach();f.append(l);g=a?"remove":"detach";i[g]();k[g]();!a&&c&&(c.insertBefore(e,b.nTableReinsertBefore),i.css("width",b.sDestroyWidth).removeClass(d.sTable),(p=b.asDestroyStripes.length)&&f.children().each(function(a){h(this).addClass(b.asDestroyStripes[a%
p])}));c=h.inArray(b,m.settings);-1!==c&&m.settings.splice(c,1)})});h.each(["column","row","cell"],function(a,b){p(b+"s().every()",function(a){var d=this.selector.opts,e=this;return this.iterator(b,function(f,g,h,i,n){a.call(e[b](g,"cell"===b?h:d,"cell"===b?d:k),g,h,i,n)})})});p("i18n()",function(a,b,c){var d=this.context[0],a=Q(a)(d.oLanguage);a===k&&(a=b);c!==k&&h.isPlainObject(a)&&(a=a[c]!==k?a[c]:a._);return a.replace("%d",c)});m.version="1.10.12";m.settings=[];m.models={};m.models.oSearch={bCaseInsensitive:!0,
sSearch:"",bRegex:!1,bSmart:!0};m.models.oRow={nTr:null,anCells:null,_aData:[],_aSortData:null,_aFilterData:null,_sFilterRow:null,_sRowStripe:"",src:null,idx:-1};m.models.oColumn={idx:null,aDataSort:null,asSorting:null,bSearchable:null,bSortable:null,bVisible:null,_sManualType:null,_bAttrSrc:!1,fnCreatedCell:null,fnGetData:null,fnSetData:null,mData:null,mRender:null,nTh:null,nTf:null,sClass:null,sContentPadding:null,sDefaultContent:null,sName:null,sSortDataType:"std",sSortingClass:null,sSortingClassJUI:null,
sTitle:null,sType:null,sWidth:null,sWidthOrig:null};m.defaults={aaData:null,aaSorting:[[0,"asc"]],aaSortingFixed:[],ajax:null,aLengthMenu:[10,25,50,100],aoColumns:null,aoColumnDefs:null,aoSearchCols:[],asStripeClasses:null,bAutoWidth:!0,bDeferRender:!1,bDestroy:!1,bFilter:!0,bInfo:!0,bJQueryUI:!1,bLengthChange:!0,bPaginate:!0,bProcessing:!1,bRetrieve:!1,bScrollCollapse:!1,bServerSide:!1,bSort:!0,bSortMulti:!0,bSortCellsTop:!1,bSortClasses:!0,bStateSave:!1,fnCreatedRow:null,fnDrawCallback:null,fnFooterCallback:null,
fnFormatNumber:function(a){return a.toString().replace(/\B(?=(\d{3})+(?!\d))/g,this.oLanguage.sThousands)},fnHeaderCallback:null,fnInfoCallback:null,fnInitComplete:null,fnPreDrawCallback:null,fnRowCallback:null,fnServerData:null,fnServerParams:null,fnStateLoadCallback:function(a){try{return JSON.parse((-1===a.iStateDuration?sessionStorage:localStorage).getItem("DataTables_"+a.sInstance+"_"+location.pathname))}catch(b){}},fnStateLoadParams:null,fnStateLoaded:null,fnStateSaveCallback:function(a,b){try{(-1===
a.iStateDuration?sessionStorage:localStorage).setItem("DataTables_"+a.sInstance+"_"+location.pathname,JSON.stringify(b))}catch(c){}},fnStateSaveParams:null,iStateDuration:7200,iDeferLoading:null,iDisplayLength:10,iDisplayStart:0,iTabIndex:0,oClasses:{},oLanguage:{oAria:{sSortAscending:": activate to sort column ascending",sSortDescending:": activate to sort column descending"},oPaginate:{sFirst:"First",sLast:"Last",sNext:"Next",sPrevious:"Previous"},sEmptyTable:"No data available in table",sInfo:"Showing _START_ to _END_ of _TOTAL_ entries",
sInfoEmpty:"Showing 0 to 0 of 0 entries",sInfoFiltered:"(filtered from _MAX_ total entries)",sInfoPostFix:"",sDecimal:"",sThousands:",",sLengthMenu:"Show _MENU_ entries",sLoadingRecords:"Loading...",sProcessing:"Processing...",sSearch:"Search:",sSearchPlaceholder:"",sUrl:"",sZeroRecords:"No matching records found"},oSearch:h.extend({},m.models.oSearch),sAjaxDataProp:"data",sAjaxSource:null,sDom:"lfrtip",searchDelay:null,sPaginationType:"simple_numbers",sScrollX:"",sScrollXInner:"",sScrollY:"",sServerMethod:"GET",
renderer:null,rowId:"DT_RowId"};X(m.defaults);m.defaults.column={aDataSort:null,iDataSort:-1,asSorting:["asc","desc"],bSearchable:!0,bSortable:!0,bVisible:!0,fnCreatedCell:null,mData:null,mRender:null,sCellType:"td",sClass:"",sContentPadding:"",sDefaultContent:null,sName:"",sSortDataType:"std",sTitle:null,sType:null,sWidth:null};X(m.defaults.column);m.models.oSettings={oFeatures:{bAutoWidth:null,bDeferRender:null,bFilter:null,bInfo:null,bLengthChange:null,bPaginate:null,bProcessing:null,bServerSide:null,
bSort:null,bSortMulti:null,bSortClasses:null,bStateSave:null},oScroll:{bCollapse:null,iBarWidth:0,sX:null,sXInner:null,sY:null},oLanguage:{fnInfoCallback:null},oBrowser:{bScrollOversize:!1,bScrollbarLeft:!1,bBounding:!1,barWidth:0},ajax:null,aanFeatures:[],aoData:[],aiDisplay:[],aiDisplayMaster:[],aIds:{},aoColumns:[],aoHeader:[],aoFooter:[],oPreviousSearch:{},aoPreSearchCols:[],aaSorting:null,aaSortingFixed:[],asStripeClasses:null,asDestroyStripes:[],sDestroyWidth:0,aoRowCallback:[],aoHeaderCallback:[],
aoFooterCallback:[],aoDrawCallback:[],aoRowCreatedCallback:[],aoPreDrawCallback:[],aoInitComplete:[],aoStateSaveParams:[],aoStateLoadParams:[],aoStateLoaded:[],sTableId:"",nTable:null,nTHead:null,nTFoot:null,nTBody:null,nTableWrapper:null,bDeferLoading:!1,bInitialised:!1,aoOpenRows:[],sDom:null,searchDelay:null,sPaginationType:"two_button",iStateDuration:0,aoStateSave:[],aoStateLoad:[],oSavedState:null,oLoadedState:null,sAjaxSource:null,sAjaxDataProp:null,bAjaxDataGet:!0,jqXHR:null,json:k,oAjaxData:k,
fnServerData:null,aoServerParams:[],sServerMethod:null,fnFormatNumber:null,aLengthMenu:null,iDraw:0,bDrawing:!1,iDrawError:-1,_iDisplayLength:10,_iDisplayStart:0,_iRecordsTotal:0,_iRecordsDisplay:0,bJUI:null,oClasses:{},bFiltered:!1,bSorted:!1,bSortCellsTop:null,oInit:null,aoDestroyCallback:[],fnRecordsTotal:function(){return"ssp"==y(this)?1*this._iRecordsTotal:this.aiDisplayMaster.length},fnRecordsDisplay:function(){return"ssp"==y(this)?1*this._iRecordsDisplay:this.aiDisplay.length},fnDisplayEnd:function(){var a=
this._iDisplayLength,b=this._iDisplayStart,c=b+a,d=this.aiDisplay.length,e=this.oFeatures,f=e.bPaginate;return e.bServerSide?!1===f||-1===a?b+d:Math.min(b+a,this._iRecordsDisplay):!f||c>d||-1===a?d:c},oInstance:null,sInstance:null,iTabIndex:0,nScrollHead:null,nScrollFoot:null,aLastSort:[],oPlugins:{},rowIdFn:null,rowId:null};m.ext=v={buttons:{},classes:{},build:"dt/dt-1.10.12",errMode:"alert",feature:[],search:[],selector:{cell:[],column:[],row:[]},internal:{},legacy:{ajax:null},pager:{},renderer:{pageButton:{},
header:{}},order:{},type:{detect:[],search:{},order:{}},_unique:0,fnVersionCheck:m.fnVersionCheck,iApiIndex:0,oJUIClasses:{},sVersion:m.version};h.extend(v,{afnFiltering:v.search,aTypes:v.type.detect,ofnSearch:v.type.search,oSort:v.type.order,afnSortData:v.order,aoFeatures:v.feature,oApi:v.internal,oStdClasses:v.classes,oPagination:v.pager});h.extend(m.ext.classes,{sTable:"dataTable",sNoFooter:"no-footer",sPageButton:"paginate_button",sPageButtonActive:"current",sPageButtonDisabled:"disabled",sStripeOdd:"odd",
sStripeEven:"even",sRowEmpty:"dataTables_empty",sWrapper:"dataTables_wrapper",sFilter:"dataTables_filter",sInfo:"dataTables_info",sPaging:"dataTables_paginate paging_",sLength:"dataTables_length",sProcessing:"dataTables_processing",sSortAsc:"sorting_asc",sSortDesc:"sorting_desc",sSortable:"sorting",sSortableAsc:"sorting_asc_disabled",sSortableDesc:"sorting_desc_disabled",sSortableNone:"sorting_disabled",sSortColumn:"sorting_",sFilterInput:"",sLengthSelect:"",sScrollWrapper:"dataTables_scroll",sScrollHead:"dataTables_scrollHead",
sScrollHeadInner:"dataTables_scrollHeadInner",sScrollBody:"dataTables_scrollBody",sScrollFoot:"dataTables_scrollFoot",sScrollFootInner:"dataTables_scrollFootInner",sHeaderTH:"",sFooterTH:"",sSortJUIAsc:"",sSortJUIDesc:"",sSortJUI:"",sSortJUIAscAllowed:"",sSortJUIDescAllowed:"",sSortJUIWrapper:"",sSortIcon:"",sJUIHeader:"",sJUIFooter:""});var Ca="",Ca="",H=Ca+"ui-state-default",ia=Ca+"css_right ui-icon ui-icon-",Xb=Ca+"fg-toolbar ui-toolbar ui-widget-header ui-helper-clearfix";h.extend(m.ext.oJUIClasses,
m.ext.classes,{sPageButton:"fg-button ui-button "+H,sPageButtonActive:"ui-state-disabled",sPageButtonDisabled:"ui-state-disabled",sPaging:"dataTables_paginate fg-buttonset ui-buttonset fg-buttonset-multi ui-buttonset-multi paging_",sSortAsc:H+" sorting_asc",sSortDesc:H+" sorting_desc",sSortable:H+" sorting",sSortableAsc:H+" sorting_asc_disabled",sSortableDesc:H+" sorting_desc_disabled",sSortableNone:H+" sorting_disabled",sSortJUIAsc:ia+"triangle-1-n",sSortJUIDesc:ia+"triangle-1-s",sSortJUI:ia+"carat-2-n-s",
sSortJUIAscAllowed:ia+"carat-1-n",sSortJUIDescAllowed:ia+"carat-1-s",sSortJUIWrapper:"DataTables_sort_wrapper",sSortIcon:"DataTables_sort_icon",sScrollHead:"dataTables_scrollHead "+H,sScrollFoot:"dataTables_scrollFoot "+H,sHeaderTH:H,sFooterTH:H,sJUIHeader:Xb+" ui-corner-tl ui-corner-tr",sJUIFooter:Xb+" ui-corner-bl ui-corner-br"});var Mb=m.ext.pager;h.extend(Mb,{simple:function(){return["previous","next"]},full:function(){return["first","previous","next","last"]},numbers:function(a,b){return[ya(a,
b)]},simple_numbers:function(a,b){return["previous",ya(a,b),"next"]},full_numbers:function(a,b){return["first","previous",ya(a,b),"next","last"]},_numbers:ya,numbers_length:7});h.extend(!0,m.ext.renderer,{pageButton:{_:function(a,b,c,d,e,f){var g=a.oClasses,j=a.oLanguage.oPaginate,i=a.oLanguage.oAria.paginate||{},k,l,m=0,p=function(b,d){var o,r,u,s,v=function(b){Ta(a,b.data.action,true)};o=0;for(r=d.length;o<r;o++){s=d[o];if(h.isArray(s)){u=h("<"+(s.DT_el||"div")+"/>").appendTo(b);p(u,s)}else{k=null;
l="";switch(s){case "ellipsis":b.append('<span class="ellipsis">…</span>');break;case "first":k=j.sFirst;l=s+(e>0?"":" "+g.sPageButtonDisabled);break;case "previous":k=j.sPrevious;l=s+(e>0?"":" "+g.sPageButtonDisabled);break;case "next":k=j.sNext;l=s+(e<f-1?"":" "+g.sPageButtonDisabled);break;case "last":k=j.sLast;l=s+(e<f-1?"":" "+g.sPageButtonDisabled);break;default:k=s+1;l=e===s?g.sPageButtonActive:""}if(k!==null){u=h("<a>",{"class":g.sPageButton+" "+l,"aria-controls":a.sTableId,"aria-label":i[s],
"data-dt-idx":m,tabindex:a.iTabIndex,id:c===0&&typeof s==="string"?a.sTableId+"_"+s:null}).html(k).appendTo(b);Wa(u,{action:s},v);m++}}}},r;try{r=h(b).find(I.activeElement).data("dt-idx")}catch(o){}p(h(b).empty(),d);r&&h(b).find("[data-dt-idx="+r+"]").focus()}}});h.extend(m.ext.type.detect,[function(a,b){var c=b.oLanguage.sDecimal;return Za(a,c)?"num"+c:null},function(a){if(a&&!(a instanceof Date)&&(!ac.test(a)||!bc.test(a)))return null;var b=Date.parse(a);return null!==b&&!isNaN(b)||M(a)?"date":
null},function(a,b){var c=b.oLanguage.sDecimal;return Za(a,c,!0)?"num-fmt"+c:null},function(a,b){var c=b.oLanguage.sDecimal;return Rb(a,c)?"html-num"+c:null},function(a,b){var c=b.oLanguage.sDecimal;return Rb(a,c,!0)?"html-num-fmt"+c:null},function(a){return M(a)||"string"===typeof a&&-1!==a.indexOf("<")?"html":null}]);h.extend(m.ext.type.search,{html:function(a){return M(a)?a:"string"===typeof a?a.replace(Ob," ").replace(Aa,""):""},string:function(a){return M(a)?a:"string"===typeof a?a.replace(Ob,
" "):a}});var za=function(a,b,c,d){if(0!==a&&(!a||"-"===a))return-Infinity;b&&(a=Qb(a,b));a.replace&&(c&&(a=a.replace(c,"")),d&&(a=a.replace(d,"")));return 1*a};h.extend(v.type.order,{"date-pre":function(a){return Date.parse(a)||0},"html-pre":function(a){return M(a)?"":a.replace?a.replace(/<.*?>/g,"").toLowerCase():a+""},"string-pre":function(a){return M(a)?"":"string"===typeof a?a.toLowerCase():!a.toString?"":a.toString()},"string-asc":function(a,b){return a<b?-1:a>b?1:0},"string-desc":function(a,
b){return a<b?1:a>b?-1:0}});db("");h.extend(!0,m.ext.renderer,{header:{_:function(a,b,c,d){h(a.nTable).on("order.dt.DT",function(e,f,g,h){if(a===f){e=c.idx;b.removeClass(c.sSortingClass+" "+d.sSortAsc+" "+d.sSortDesc).addClass(h[e]=="asc"?d.sSortAsc:h[e]=="desc"?d.sSortDesc:c.sSortingClass)}})},jqueryui:function(a,b,c,d){h("<div/>").addClass(d.sSortJUIWrapper).append(b.contents()).append(h("<span/>").addClass(d.sSortIcon+" "+c.sSortingClassJUI)).appendTo(b);h(a.nTable).on("order.dt.DT",function(e,
f,g,h){if(a===f){e=c.idx;b.removeClass(d.sSortAsc+" "+d.sSortDesc).addClass(h[e]=="asc"?d.sSortAsc:h[e]=="desc"?d.sSortDesc:c.sSortingClass);b.find("span."+d.sSortIcon).removeClass(d.sSortJUIAsc+" "+d.sSortJUIDesc+" "+d.sSortJUI+" "+d.sSortJUIAscAllowed+" "+d.sSortJUIDescAllowed).addClass(h[e]=="asc"?d.sSortJUIAsc:h[e]=="desc"?d.sSortJUIDesc:c.sSortingClassJUI)}})}}});var Yb=function(a){return"string"===typeof a?a.replace(/</g,"<").replace(/>/g,">").replace(/"/g,"""):a};m.render={number:function(a,
b,c,d,e){return{display:function(f){if("number"!==typeof f&&"string"!==typeof f)return f;var g=0>f?"-":"",h=parseFloat(f);if(isNaN(h))return Yb(f);f=Math.abs(h);h=parseInt(f,10);f=c?b+(f-h).toFixed(c).substring(2):"";return g+(d||"")+h.toString().replace(/\B(?=(\d{3})+(?!\d))/g,a)+f+(e||"")}}},text:function(){return{display:Yb}}};h.extend(m.ext.internal,{_fnExternApiFunc:Nb,_fnBuildAjax:ra,_fnAjaxUpdate:lb,_fnAjaxParameters:ub,_fnAjaxUpdateDraw:vb,_fnAjaxDataSrc:sa,_fnAddColumn:Ea,_fnColumnOptions:ja,
_fnAdjustColumnSizing:Y,_fnVisibleToColumnIndex:Z,_fnColumnIndexToVisible:$,_fnVisbleColumns:aa,_fnGetColumns:la,_fnColumnTypes:Ga,_fnApplyColumnDefs:ib,_fnHungarianMap:X,_fnCamelToHungarian:K,_fnLanguageCompat:Da,_fnBrowserDetect:gb,_fnAddData:N,_fnAddTr:ma,_fnNodeToDataIndex:function(a,b){return b._DT_RowIndex!==k?b._DT_RowIndex:null},_fnNodeToColumnIndex:function(a,b,c){return h.inArray(c,a.aoData[b].anCells)},_fnGetCellData:B,_fnSetCellData:jb,_fnSplitObjNotation:Ja,_fnGetObjectDataFn:Q,_fnSetObjectDataFn:R,
_fnGetDataMaster:Ka,_fnClearTable:na,_fnDeleteIndex:oa,_fnInvalidate:ca,_fnGetRowElements:Ia,_fnCreateTr:Ha,_fnBuildHead:kb,_fnDrawHead:ea,_fnDraw:O,_fnReDraw:T,_fnAddOptionsHtml:nb,_fnDetectHeader:da,_fnGetUniqueThs:qa,_fnFeatureHtmlFilter:pb,_fnFilterComplete:fa,_fnFilterCustom:yb,_fnFilterColumn:xb,_fnFilter:wb,_fnFilterCreateSearch:Pa,_fnEscapeRegex:Qa,_fnFilterData:zb,_fnFeatureHtmlInfo:sb,_fnUpdateInfo:Cb,_fnInfoMacros:Db,_fnInitialise:ga,_fnInitComplete:ta,_fnLengthChange:Ra,_fnFeatureHtmlLength:ob,
_fnFeatureHtmlPaginate:tb,_fnPageChange:Ta,_fnFeatureHtmlProcessing:qb,_fnProcessingDisplay:C,_fnFeatureHtmlTable:rb,_fnScrollDraw:ka,_fnApplyToChildren:J,_fnCalculateColumnWidths:Fa,_fnThrottle:Oa,_fnConvertToWidth:Fb,_fnGetWidestNode:Gb,_fnGetMaxLenString:Hb,_fnStringToCss:x,_fnSortFlatten:V,_fnSort:mb,_fnSortAria:Jb,_fnSortListener:Va,_fnSortAttachListener:Ma,_fnSortingClasses:va,_fnSortData:Ib,_fnSaveState:wa,_fnLoadState:Kb,_fnSettingsFromNode:xa,_fnLog:L,_fnMap:E,_fnBindAction:Wa,_fnCallbackReg:z,
_fnCallbackFire:u,_fnLengthOverflow:Sa,_fnRenderer:Na,_fnDataSource:y,_fnRowAttributes:La,_fnCalculateEnd:function(){}});h.fn.dataTable=m;m.$=h;h.fn.dataTableSettings=m.settings;h.fn.dataTableExt=m.ext;h.fn.DataTable=function(a){return h(this).dataTable(a).api()};h.each(m,function(a,b){h.fn.DataTable[a]=b});return h.fn.dataTable});
/*jquery.dataTables.columnFilter.min.js*/
// jquery.dataTables.columnFilter (minified) — jQuery plugin that adds per-column
// filter widgets to a DataTable: $(table).dataTable().columnFilter(options).
// Widget types handled by the dispatch switch near the end: "text" (default),
// "number", "select", "number-range", "date-range", "checkbox",
// "dropdown"/"twitter-dropdown", and "null" (no filter for that column).
//
// Minified internal helpers (single-letter names are minifier output):
//   C(settings, col, unique, filtered, ignoreEmpty) — collect one column's values
//     from aiDisplay/aiDisplayMaster, optionally de-duplicated; returns sorted.
//   r(col) — map a visible column index to the real index via
//     _fnVisibleToColumnIndex, unless options.bUseColVis is truthy.
//   D(...) — text/number <input> filter with keyup/focus/blur wiring.
//   H(table) — numeric from/to range filter (custom afnFiltering predicate).
//   I(table) — date from/to range filter using jQuery UI datepicker.
//   E(...) — <select> filter, single or multiple.
//   J(...) — E() wrapper that defers population for AJAX-sourced tables.
//   K(values) — Bootstrap dropdown-menu filter.
//   L(table, values) — checkbox group hosted in a jQuery UI dialog.
//   B(part) — slice sRangeFormat around the {from}/{to} tokens.
// Shared closure state: l = current column index, w = column label text,
// m = target cell, u = current range format, y = range-filter columns,
// F = per-range-filter value getters, x = chained "after filter" hook.
// NOTE(review): vendor-minified code — left byte-identical; comments only
// describe what the visible source shows.
(function(a){a.fn.columnFilter=function(G){function C(a,c,b,e,f){if("undefined"==typeof c)return[];"undefined"==typeof b&&(b=!0);"undefined"==typeof e&&(e=!0);"undefined"==typeof f&&(f=!0);a=1==e?a.aiDisplay:a.aiDisplayMaster;e=[];for(var z=0,d=a.length;z<d;z++){var n=g.fnGetData(a[z])[c];if(1!=f||0!=n.length)1==b&&-1<jQuery.inArray(n,e)||e.push(n)}return e.sort()}function r(a){return t.bUseColVis?a:g.fnSettings().oApi._fnVisibleToColumnIndex(g.fnSettings(),a)}function D(h,c,b,e,f,z){var d="text_filter form-control";
// D() continued: build the text/number <input>, seeding its value from any
// pre-existing aoPreSearchCols search (a leading "^" is stripped for numbers).
e&&(d="number_filter form-control");w=w.replace(/(^\s*)|(\s*$)/g,"");var n=h.fnSettings().aoPreSearchCols[l].sSearch,g="search_init ",v=w;""!=n&&"^"!=n&&(v=e&&"^"==n.charAt(0)?n.substr(1):n,g="");d=a('<input type="text" class="'+g+d+'" value="'+v+'" rel="'+l+'"/>');void 0!=z&&-1!=z&&d.attr("maxlength",z);m.html(d);e?m.wrapInner('<span class="filter_column filter_number" />'):m.wrapInner('<span class="filter_column filter_text" />');A[l]=w;var q=l;e&&!h.fnSettings().oFeatures.bServerSide?d.keyup(function(){h.fnFilter("^"+
// keyup: for server-side tables, skip re-filtering until the value length has
// changed by at least iFilterLength; focus/blur swap the placeholder label
// (the column title) in and out. H() (number-range) begins at end of line.
this.value,r(q),!0,!1);x()}):d.keyup(function(){if(h.fnSettings().oFeatures.bServerSide&&0!=f){h.fnSettings();var d=a(this).data("dt-iLastFilterLength");"undefined"==typeof d&&(d=0);var e=this.value.length;if(Math.abs(e-d)<f)return;a(this).data("dt-iLastFilterLength",e)}h.fnFilter(this.value,r(q),c,b);x()});d.focus(function(){a(this).hasClass("search_init")&&(a(this).removeClass("search_init"),this.value="")});d.blur(function(){""==this.value&&(a(this).addClass("search_init"),this.value=A[q])})}function H(h){m.html(B(0));
// H(): "from"/"to" inputs laid out around the sRangeFormat text, plus a
// per-table predicate pushed onto dataTableExt.afnFiltering.
var c=h.attr("id")+"_range_from_"+l,b=a('<input type="text" class="number_range_filter form-control" id="'+c+'" rel="'+l+'"/>');m.append(b);m.append(B(1));var e=h.attr("id")+"_range_to_"+l,b=a('<input type="text" class="number_range_filter form-control" id="'+e+'" rel="'+l+'"/>');m.append(b);m.append(B(2));m.wrapInner('<span class="filter_column filter_number_range form-control" />');var f=l;y.push(l);h.dataTableExt.afnFiltering.push(function(a,d,b){if(h.attr("id")!=a.sTableId||null==document.getElementById(c))return!0;
// Predicate: empty bounds are open-ended; a "-" cell counts as 0; keyup
// redraws unless both bounds are set with from > to. I() (date-range) begins
// partway through this line.
a=1*document.getElementById(c).value;b=1*document.getElementById(e).value;d="-"==d[r(f)]?0:1*d[r(f)];return""==a&&""==b||""==a&&d<=b||a<=d&&""==b||a<=d&&d<=b?!0:!1});a("#"+c+",#"+e,m).keyup(function(){var a=1*document.getElementById(c).value,b=1*document.getElementById(e).value;0!=a&&0!=b&&a>b||(h.fnDraw(),x())})}function I(h){var c=u.split(/[}{]/);m.html("");var b=h.attr("id")+"_range_from_"+l,e=a('<input type="text" class="date_range_filter form-control" id="'+b+'" rel="'+l+'"/>');e.datepicker();
// I(): from/to datepicker inputs positioned where sDateFromToken/sDateToToken
// appear inside sRangeFormat; a per-table afnFiltering predicate follows.
var f=h.attr("id")+"_range_to_"+l,g=a('<input type="text" class="date_range_filter form-control" id="'+f+'" rel="'+l+'"/>');for(ti=0;ti<c.length;ti++)c[ti]==t.sDateFromToken?m.append(e):c[ti]==t.sDateToToken?m.append(g):m.append(c[ti]);m.wrapInner('<span class="filter_column filter_date_range" />');g.datepicker();var d=l;y.push(l);h.dataTableExt.afnFiltering.push(function(b,c,f){if(h.attr("id")!=b.sTableId)return!0;b=e.datepicker("getDate");f=g.datepicker("getDate");if(null==b&&null==f)return!0;var l=
// Cell dates parsed with jQuery UI's default regional date format; empty or
// unparsable cells are filtered out. E() (select filter) begins mid-line.
null;try{if(null==c[r(d)]||""==c[r(d)])return!1;l=a.datepicker.parseDate(a.datepicker.regional[""].dateFormat,c[r(d)])}catch(m){return!1}return null==l?!1:null==b&&l<=f||b<=l&&null==f||b<=l&&l<=f?!0:!1});a("#"+b+",#"+f,m).change(function(){h.fnDraw();x()})}function E(h,c,b,e,f,g,d,m){null==c&&(c=C(h.fnSettings(),b,!0,!1,!0));var p=h.fnSettings().aoPreSearchCols[l].sSearch;if(null==p||""==p)p=d;f='<select class="search_init select_filter form-control" rel="'+l+'"><option value="" class="search_init">'+
// E(): build <option> markup — plain values are escape()-d; {value,label}
// object entries use value/label (value left unescaped when bRegex is set);
// "selected" restores a previously-saved column search.
f+"</option>";m&&(f='<select class="search_init select_filter form-control" rel="'+l+'" multiple>');d=0;var v=c.length;for(d=0;d<v;d++)if("object"!=typeof c[d]){var q="";if(escape(c[d])==p||escape(c[d])==escape(p))q="selected ";f+="<option "+q+' value="'+escape(c[d])+'">'+c[d]+"</option>"}else q="",g?(c[d].value==p&&(q="selected "),f+="<option "+q+'value="'+c[d].value+'">'+c[d].label+"</option>"):(escape(c[d].value)==p&&(q="selected "),f+="<option "+q+'value="'+escape(c[d].value)+'">'+c[d].label+
// change handler: a multiple-select ORs its chosen values into a "^(a|b)$"
// regex (values regex-escaped first); a single select filters directly.
"</option>");c=a(f+"</select>");e.html(c);e.wrapInner('<span class="filter_column filter_select" />');m?c.change(function(){""!=a(this).val()?a(this).removeClass("search_init"):a(this).addClass("search_init");var c=a(this).val(),d=[];null==c||c==[]?c="^(.*)$":(a.each(c,function(a,b){d.push(b.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&"))}),c="^("+d.join("|")+")$");h.fnFilter(c,b,!0,!1)}):(c.change(function(){""!=a(this).val()?a(this).removeClass("search_init"):a(this).addClass("search_init");g?h.fnFilter(a(this).val(),
// J(): for AJAX-sourced, non-server-side tables, repopulate the select from
// the loaded rows on draw #2 via aoDrawCallback; otherwise populate now.
// K() (Bootstrap dropdown filter) begins at the end of this line.
b,g):h.fnFilter(unescape(a(this).val()),b);x()}),null!=p&&""!=p&&h.fnFilter(unescape(p),b))}function J(a,c,b,e,f){var g=a.fnSettings();null!=c&&"function"!=typeof c||""==g.sAjaxSource||g.oFeatures.bServerSide||g.aoDrawCallback.push({fn:function(d,g,l){return function(m){if(2==m.iDraw&&null!=m.sAjaxSource&&""!=m.sAjaxSource&&!m.oFeatures.bServerSide)return E(a,c&&c(m.aoData,m),r(d),g,l,b,e,f)}}(l,m,w),sName:"column_filter_"+l});E(a,"function"==typeof c?null:c,r(l),m,w,b,e,f)}function K(h){var c=l,
// K(): dropdown menu with a "Show All" entry; clicking an item filters the
// column on its data-value. L() (checkbox filter) begins at end of line.
b='<div class="dropdown select_filter form-control"><a class="dropdown-toggle" data-toggle="dropdown" href="#">'+w+'<b class="caret"></b></a><ul class="dropdown-menu" role="menu"><li data-value=""><a>Show All</a></li>',e,f=h.length;for(e=0;e<f;e++)b+='<li data-value="'+h[e]+'"><a>'+h[e]+"</a></li>";h=a(b+"</ul></div>");m.html(h);m.wrapInner('<span class="filterColumn filter_select" />');h.find("li").click(function(){g.fnFilter(a(this).data("value"),c)})}function L(h,c){null==c&&(c=C(h.fnSettings(),
// L(): derive a DOM-safe id/name from the column label (special chars mapped
// to words, whitespace to "_"), lay the checkboxes out in columns of 12.
l,!0,!0,!0));var b=l,e="",f,g=c.length,d=w.replace("%","Perc").replace("&","AND").replace("$","DOL").replace("\u00a3","STERL").replace("@","AT").replace(/\s/g,"_"),d=d.replace(/[^a-zA-Z 0-9]+/g,"");f=w;if(null!=t.sFilterButtonText||void 0!=t.sFilterButtonText)f=t.sFilterButtonText;var n=Math.floor(g/12);0<g%12&&(n+=1);var p=100/n-2,v=10*n;1==n&&(p=20);var n='<div style="float:left; min-width: '+p+'%; " >',p=h.attr("id")+d,q="chkBtnOpen"+p,r=p+"-flt-toggle",e=e+('<button id="'+q+'" class="checkbox_filter btn btn-default" > '+
// One checkbox per value ({value,label} entries supported); the change
// handler below ORs the checked values into a single filter pattern.
f+"</button>")+('<div id="'+r+'" title="'+w+'" rel="'+l+'" class="toggle-check ui-widget-content ui-corner-all" style="width: '+v+'%; " >'),e=e+n;for(f=0;f<g;f++){0==f%12&&0!=f&&(e+="</div>"+n);var v=c[f],u=c[f];"object"==typeof c[f]&&(v=c[f].label,u=c[f].value);e+='<input class="search_init checkbox_filter btn btn-default" type="checkbox" id= "'+p+"_cb_"+u+'" name= "'+d+'" value="'+u+'" >'+v+"<br/>";v=a(e);m.html(v);m.wrapInner('<span class="filter_column filter_checkbox" />');v.change(function(){var c=
// Checked values joined with "|" and applied as a regex filter; a jQuery UI
// dialog hosts the checkbox group with Reset/Close buttons.
"",e="|",f=a('input:checkbox[name="'+d+'"]:checked').size();a('input:checkbox[name="'+d+'"]:checked').each(function(b){if(0==b&&1==f||0!=b&&b==f-1)e="";c=c.replace(/^\s+|\s+$/g,"");c=c+a(this).val()+e;e="|"});""!=c?a('input:checkbox[name="'+d+'"]').removeClass("search_init"):a('input:checkbox[name="'+d+'"]').addClass("search_init");h.fnFilter(c,b,!0,!1);x()})}a("#"+q).button();a("#"+r).dialog({autoOpen:!1,hide:"blind",buttons:[{text:"Reset",click:function(){a('input:checkbox[name="'+d+'"]:checked').each(function(b){a(this).attr("checked",
// The toggle button opens the dialog positioned under itself; x() is chained
// so later filter actions also reposition the dialog. B() (range-format
// slicer) begins partway through this line.
!1);a(this).addClass("search_init")});h.fnFilter("",b,!0,!1);x();return!1}},{text:"Close",click:function(){a(this).dialog("close")}}]});a("#"+q).click(function(){a("#"+r).dialog("open");var b=a(this);a("#"+r).dialog("widget").position({my:"top",at:"bottom",of:b});return!1});var y=x;x=function(){var b=a("#"+q);a("#"+r).dialog("widget").position({my:"top",at:"bottom",of:b});y()}}function B(a){switch(a){case 0:return u.substring(0,u.indexOf("{from}"));case 1:return u.substring(u.indexOf("{from}")+6,
// B(): 0 = text before {from}, 1 = text between {from} and {to},
// default = text after {to}. Then the plugin's shared state and option
// defaults, followed by the main per-table loop choosing the host row.
u.indexOf("{to}"));default:return u.substring(u.indexOf("{to}")+4)}}var A,l,w,m,u="From {from} to {to}",F=[],y=[],x=function(){},g=this,t=a.extend({sPlaceHolder:"foot",sRangeSeparator:"~",iFilteringDelay:500,aoColumns:null,sRangeFormat:"From {from} to {to}",sDateFromToken:"from",sDateToToken:"to"},G);return this.each(function(){if(g.fnSettings().oFeatures.bFilter){A=[];var h=g.fnSettings().aoFooter[0];g.fnSettings();"head:after"==t.sPlaceHolder?(h=a("tr:first",g.fnSettings().nTHead).detach(),g.fnSettings().bSortCellsTop?
// sPlaceHolder "head:after"/"head:before" relocates the first header row and
// selects the matching aoHeader entry depending on bSortCellsTop; the default
// placement is the footer row (aoFooter[0]).
(h.prependTo(a(g.fnSettings().nTHead)),h=g.fnSettings().aoHeader[1]):(h.appendTo(a(g.fnSettings().nTHead)),h=g.fnSettings().aoHeader[0]),g.fnSettings()):"head:before"==t.sPlaceHolder&&(g.fnSettings().bSortCellsTop?(h=a("tr:first",g.fnSettings().nTHead).detach(),h.appendTo(a(g.fnSettings().nTHead)),h=g.fnSettings().aoHeader[1]):h=g.fnSettings().aoHeader[0],g.fnSettings());a(h).each(function(b){l=b;b={type:"text",bRegex:!1,bSmart:!0,iMaxLenght:-1,iFilterLength:0};if(null!=t.aoColumns){if(t.aoColumns.length<
// Per-column setup: read the label, resolve an optional sSelector target cell,
// then dispatch on the configured type. (Note: "iMaxLenght" is the plugin's
// own option spelling, preserved as-is.)
l||null==t.aoColumns[l])return;b=t.aoColumns[l]}w=a(a(this)[0].cell).text();null==b.sSelector?m=a(a(this)[0].cell):(m=a(b.sSelector),0==m.length&&(m=a(a(this)[0].cell)));if(null!=b)switch(u=null!=b.sRangeFormat?b.sRangeFormat:t.sRangeFormat,b.type){case "null":break;case "number":D(g,!0,!1,!0,b.iFilterLength,b.iMaxLenght);break;case "select":1!=b.bRegex&&(b.bRegex=!1);J(g,b.values,b.bRegex,b.selected,b.multiple);break;case "number-range":H(g);break;case "date-range":I(g);break;case "checkbox":L(g,
// Default case = plain text filter. Afterwards: register a "from~to" value
// getter (F) for every range-filter column, then for server-side tables wrap
// fnServerData so those values are submitted in the matching sSearch_N param.
b.values);break;case "twitter-dropdown":case "dropdown":K(b.values);break;default:bRegex=null==b.bRegex?!1:b.bRegex,bSmart=null==b.bSmart?!1:b.bSmart,D(g,bRegex,bSmart,!1,b.iFilterLength,b.iMaxLenght)}});for(j=0;j<y.length;j++)F.push(function(){var b=g.attr("id");return a("#"+b+"_range_from_"+y[j]).val()+t.sRangeSeparator+a("#"+b+"_range_to_"+y[j]).val()});if(g.fnSettings().oFeatures.bServerSide){var c=g.fnSettings().fnServerData;g.fnSettings().fnServerData=function(b,e,f){for(j=0;j<y.length;j++){var h=
// Also pushes a sRangeSeparator request parameter; the try/catch falls back
// from the 4-argument to the legacy 3-argument fnServerData signature, and
// with no original handler a plain $.getJSON request is issued.
y[j];for(k=0;k<e.length;k++)e[k].name=="sSearch_"+h&&(e[k].value=F[j]())}e.push({name:"sRangeSeparator",value:t.sRangeSeparator});if(null!=c)try{c(b,e,f,g.fnSettings())}catch(d){c(b,e,f)}else a.getJSON(b,e,function(a){f(a)})}}}})}})(jQuery);
/* moment-timezone-with-data-2010-2020.min.js */
(function(d,m){"function"===typeof define&&define.amd?define(["moment"],m):"object"===typeof exports?module.exports=m(require("moment")):m(d.moment)})(this,function(d){function m(a){return 96<a?a-87:64<a?a-29:a-48}function w(a){var b=0,c=a.split("."),g=c[0],c=c[1]||"",d=1,e=0,f=1;45===a.charCodeAt(0)&&(b=1,f=-1);for(b;b<g.length;b++)a=m(g.charCodeAt(b)),e=60*e+a;for(b=0;b<c.length;b++)d/=60,a=m(c.charCodeAt(b)),e+=a*d;return e*f}function u(a){for(var b=0;b<a.length;b++)a[b]=w(a[b])}function x(a,b){var c=
[],g;for(g=0;g<b.length;g++)c[g]=a[b[g]];return c}function y(a){a=a.split("|");var b=a[2].split(" "),c=a[3].split(""),g=a[4].split(" ");u(b);u(c);u(g);for(var e=c.length,d=0;d<e;d++)g[d]=Math.round((g[d-1]||0)+6E4*g[d]);g[e-1]=Infinity;return{name:a[0],abbrs:x(a[1].split(" "),c),offsets:x(b,c),untils:g}}function p(a){a&&this._set(y(a))}function q(a){return(a||"").toLowerCase().replace(/\//g,"_")}function z(a){var b,c,d;"string"===typeof a&&(a=[a]);for(b=0;b<a.length;b++)if(c=new p(a[b]),d=q(c.name),
h[d]=c,c=d,k[c]){d=void 0;var e=h[c],f=k[c];for(d=0;d<f.length;d++)A(e,f[d]);k[c]=null}}function n(a){return h[q(a)]||null}function B(a){var b,c;"string"===typeof a&&(a=[a]);for(b=0;b<a.length;b++)c=a[b].split("|"),C(c[0],c[1]),C(c[1],c[0])}function A(a,b){var c=h[q(b)]=new p;c._set(a);c.name=b}function C(a,b){a=q(a);h[a]?A(h[a],b):(k[a]=k[a]||[],k[a].push(b))}function D(a){z(a.zones);B(a.links);e.dataVersion=a.version}function v(a){v.didShowError||(v.didShowError=!0,r("moment.tz.zoneExists('"+a+
"') has been deprecated in favor of !moment.tz.zone('"+a+"')"));return!!n(a)}function E(a){return!(!a._a||void 0!==a._tzm)}function r(a){"undefined"!==typeof console&&"function"===typeof console.error&&console.error(a)}function e(a){var b=Array.prototype.slice.call(arguments,0,-1),c=arguments[arguments.length-1],e=n(c),b=d.utc.apply(null,b);e&&!d.isMoment(a)&&E(b)&&b.add(e.parse(b),"minutes");b.tz(c);return b}function F(a){return function(){return this._z?this._z.abbr(this):a.call(this)}}if(void 0!==
d.tz)return d;var h={},k={},f=d.version.split("."),t=+f[0],G=+f[1];(2>t||2===t&&6>G)&&r("Moment Timezone requires Moment.js >= 2.6.0. You are using Moment.js "+d.version+". See momentjs.com");p.prototype={_set:function(a){this.name=a.name;this.abbrs=a.abbrs;this.untils=a.untils;this.offsets=a.offsets},_index:function(a){a=+a;var b=this.untils,c;for(c=0;c<b.length;c++)if(a<b[c])return c},parse:function(a){a=+a;var b=this.offsets,c=this.untils,d=c.length-1,f,h,k,l;for(l=0;l<d;l++)if(f=b[l],h=b[l+1],
k=b[l?l-1:l],f<h&&e.moveAmbiguousForward?f=h:f>k&&e.moveInvalidForward&&(f=k),a<c[l]-6E4*f)return b[l];return b[d]},abbr:function(a){return this.abbrs[this._index(a)]},offset:function(a){return this.offsets[this._index(a)]}};e.version="0.3.0";e.dataVersion="";e._zones=h;e._links=k;e.add=z;e.link=B;e.load=D;e.zone=n;e.zoneExists=v;e.names=function(){var a,b=[];for(a in h)h.hasOwnProperty(a)&&h[a]&&b.push(h[a].name);return b.sort()};e.Zone=p;e.unpack=y;e.unpackBase60=w;e.needsOffset=E;e.moveInvalidForward=
!0;e.moveAmbiguousForward=!1;f=d.fn;d.tz=e;d.defaultZone=null;d.updateOffset=function(a,b){var c;void 0===a._z&&(a._z=d.defaultZone);a._z&&(c=a._z.offset(a),16>Math.abs(c)&&(c/=60),void 0!==a.utcOffset?a.utcOffset(-c,b):a.zone(c,b))};f.tz=function(a){if(a)return(this._z=n(a))?d.updateOffset(this):r("Moment Timezone has no data for "+a+". See http://momentjs.com/timezone/docs/#/data-loading/."),this;if(this._z)return this._z.name};f.zoneName=F(f.zoneName);f.zoneAbbr=F(f.zoneAbbr);f.utc=function(a){return function(){this._z=
null;return a.apply(this,arguments)}}(f.utc);d.tz.setDefault=function(a){(2>t||2===t&&9>G)&&r("Moment Timezone setDefault() requires Moment.js >= 2.9.0. You are using Moment.js "+d.version+".");d.defaultZone=a?n(a):null;return d};f=d.momentProperties;"[object Array]"===Object.prototype.toString.call(f)?(f.push("_z"),f.push("_a")):f&&(f._z=null);D({version:"2014j",zones:"Africa/Abidjan|GMT|0|0|;Africa/Addis_Ababa|EAT|-30|0|;Africa/Algiers|CET|-10|0|;Africa/Bangui|WAT|-10|0|;Africa/Blantyre|CAT|-20|0|;Africa/Cairo|EET EEST|-20 -30|0101010101010101010101010101010|1Cby0 Fb0 c10 8n0 8Nd0 gL0 e10 mn0 1o10 jz0 gN0 pb0 1qN0 dX0 e10 xz0 1o10 bb0 e10 An0 1o10 5z0 e10 FX0 1o10 2L0 e10 IL0 1C10 Lz0;Africa/Casablanca|WET WEST|0 -10|01010101010101010101010101010101010101010|1Cco0 Db0 1zd0 Lz0 1Nf0 wM0 co0 go0 1o00 s00 dA0 vc0 11A0 A00 e00 y00 11A0 uo0 e00 DA0 11A0 rA0 e00 Jc0 WM0 m00 gM0 M00 WM0 jc0 e00 RA0 11A0 dA0 e00 Uo0 11A0 800 gM0 Xc0;Africa/Ceuta|CET CEST|-10 -20|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Africa/Johannesburg|SAST|-20|0|;Africa/Tripoli|EET CET CEST|-20 -10 -20|0120|1IlA0 TA0 1o00;Africa/Windhoek|WAST WAT|-20 -10|01010101010101010101010|1C1c0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 11B0;America/Adak|HAST HADT|a0 90|01010101010101010101010|1BR00 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Anchorage|AKST AKDT|90 80|01010101010101010101010|1BQX0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Anguilla|AST|40|0|;America/Araguaina|BRT BRST|30 20|010|1IdD0 Lz0;America/Argentina/Buenos_Aires|ART|30|0|;America/Asuncion|PYST PYT|30 40|01010101010101010101010|1C430 1a10 1fz0 1a10 1fz0 1cN0 17b0 1ip0 17b0 1ip0 17b0 1ip0 19X0 1fB0 19X0 1fB0 19X0 1ip0 17b0 1ip0 17b0 1ip0;America/Atikokan|EST|50|0|;America/Bahia|BRT 
BRST|30 20|010|1FJf0 Rb0;America/Bahia_Banderas|MST CDT CST|70 50 60|01212121212121212121212|1C1l0 1nW0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0;America/Belem|BRT|30|0|;America/Belize|CST|60|0|;America/Boa_Vista|AMT|40|0|;America/Bogota|COT|50|0|;America/Boise|MST MDT|70 60|01010101010101010101010|1BQV0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Campo_Grande|AMST AMT|30 40|01010101010101010101010|1BIr0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10;America/Cancun|CST CDT|60 50|01010101010101010101010|1C1k0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0;America/Caracas|VET|4u|0|;America/Cayenne|GFT|30|0|;America/Chicago|CST CDT|60 50|01010101010101010101010|1BQU0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Chihuahua|MST MDT|70 60|01010101010101010101010|1C1l0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0;America/Creston|MST|70|0|;America/Dawson|PST PDT|80 70|01010101010101010101010|1BQW0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Detroit|EST EDT|50 40|01010101010101010101010|1BQT0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Eirunepe|AMT ACT|40 50|01|1KLE0;America/Glace_Bay|AST ADT|40 30|01010101010101010101010|1BQS0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Godthab|WGT WGST|30 20|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;America/Goose_Bay|AST ADT|40 30|01010101010101010101010|1BQQ1 1zb0 Op0 1zcX Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 
Op0 1zb0;America/Grand_Turk|EST EDT AST|50 40 40|0101010101012|1BQT0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Guayaquil|ECT|50|0|;America/Guyana|GYT|40|0|;America/Havana|CST CDT|50 40|01010101010101010101010|1BQR0 1wo0 U00 1zc0 U00 1qM0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Rc0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0 Oo0 1zc0;America/La_Paz|BOT|40|0|;America/Lima|PET|50|0|;America/Metlakatla|PST|80|0|;America/Miquelon|PMST PMDT|30 20|01010101010101010101010|1BQR0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Montevideo|UYST UYT|20 30|01010101010101010101010|1BQQ0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10 11z0 1o10 14n0 1ld0 14n0 1ld0 14n0 1o10 11z0 1o10 11z0 1o10;America/Noronha|FNT|20|0|;America/North_Dakota/Beulah|MST MDT CST CDT|70 60 60 50|01232323232323232323232|1BQV0 1zb0 Oo0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Paramaribo|SRT|30|0|;America/Port-au-Prince|EST EDT|50 40|0101010101010101010|1GI70 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;America/Santa_Isabel|PST PDT|80 70|01010101010101010101010|1C1m0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0 14p0 1lb0 14p0 1nX0 11B0 1nX0 11B0 1nX0 14p0 1lb0 14p0 1lb0;America/Santiago|CLST CLT|30 40|01010101010101010101010|1C1f0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 1wn0 Rd0 1wn0 Rd0 1wn0 Rd0 1zb0 Op0 1zb0 Rd0 1wn0 Rd0;America/Sao_Paulo|BRST BRT|20 30|01010101010101010101010|1BIq0 1zd0 On0 1zd0 Rb0 1zd0 Lz0 1C10 Lz0 1C10 On0 1zd0 On0 1zd0 On0 1zd0 On0 1C10 Lz0 1C10 Lz0 1C10;America/Scoresbysund|EGT EGST|10 0|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;America/St_Johns|NST NDT|3u 2u|01010101010101010101010|1BQPv 1zb0 Op0 1zcX Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Rd0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0 Op0 1zb0;Antarctica/Casey|CAST AWST|-b0 -80|0101|1BN30 40P0 KL0;Antarctica/Davis|DAVT DAVT|-50 
-70|0101|1BPw0 3Wn0 KN0;Antarctica/DumontDUrville|DDUT|-a0|0|;Antarctica/Macquarie|AEDT MIST|-b0 -b0|01|1C140;Antarctica/Mawson|MAWT|-50|0|;Antarctica/McMurdo|NZDT NZST|-d0 -c0|01010101010101010101010|1C120 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00;Antarctica/Rothera|ROTT|30|0|;Antarctica/Syowa|SYOT|-30|0|;Antarctica/Troll|UTC CEST|0 -20|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Antarctica/Vostok|VOST|-60|0|;Asia/Aden|AST|-30|0|;Asia/Almaty|ALMT|-60|0|;Asia/Amman|EET EEST|-20 -30|010101010101010101010|1BVy0 1qM0 11A0 1o00 11A0 4bX0 Dd0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0;Asia/Anadyr|ANAT ANAST ANAT|-c0 -c0 -b0|0120|1BWe0 1qN0 WM0;Asia/Aqtau|AQTT|-50|0|;Asia/Ashgabat|TMT|-50|0|;Asia/Baku|AZT AZST|-40 -50|01010101010101010101010|1BWo0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Asia/Bangkok|ICT|-70|0|;Asia/Beirut|EET EEST|-20 -30|01010101010101010101010|1BWm0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0;Asia/Bishkek|KGT|-60|0|;Asia/Brunei|BNT|-80|0|;Asia/Calcutta|IST|-5u|0|;Asia/Chita|YAKT YAKST YAKT IRKT|-90 -a0 -a0 -80|01023|1BWh0 1qM0 WM0 8Hz0;Asia/Choibalsan|CHOT|-80|0|;Asia/Chongqing|CST|-80|0|;Asia/Dacca|BDT|-60|0|;Asia/Damascus|EET EEST|-20 -30|01010101010101010101010|1C0m0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0 WN0 1qL0 WN0 1qL0 11B0 1nX0 11B0 1nX0 11B0 1nX0 11B0 1qL0;Asia/Dili|TLT|-90|0|;Asia/Dubai|GST|-40|0|;Asia/Dushanbe|TJT|-50|0|;Asia/Gaza|EET EEST|-20 -30|01010101010101010101010|1BVW1 SKX 1xd1 MKX 1AN0 1a00 1fA0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 1cL0 1cN0 1cL0;Asia/Hebron|EET EEST|-20 -30|0101010101010101010101010|1BVy0 Tb0 1xd1 MKX bB0 cn0 1cN0 1a00 1fA0 1cL0 1cN0 1cL0 1cN0 1cL0 1fB0 19X0 1fB0 19X0 1fB0 19X0 1fB0 1cL0 1cN0 
1cL0;Asia/Hong_Kong|HKT|-80|0|;Asia/Hovd|HOVT|-70|0|;Asia/Irkutsk|IRKT IRKST IRKT|-80 -90 -90|01020|1BWi0 1qM0 WM0 8Hz0;Asia/Istanbul|EET EEST|-20 -30|01010101010101010101010|1BWp0 1qM0 Xc0 1qo0 WM0 1qM0 11A0 1o00 1200 1nA0 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Asia/Jakarta|WIB|-70|0|;Asia/Jayapura|WIT|-90|0|;Asia/Jerusalem|IST IDT|-20 -30|01010101010101010101010|1BVA0 17X0 1kp0 1dz0 1c10 1aL0 1eN0 1oL0 10N0 1oL0 10N0 1oL0 10N0 1rz0 W10 1rz0 W10 1rz0 10N0 1oL0 10N0 1oL0;Asia/Kabul|AFT|-4u|0|;Asia/Kamchatka|PETT PETST PETT|-c0 -c0 -b0|0120|1BWe0 1qN0 WM0;Asia/Karachi|PKT|-50|0|;Asia/Kashgar|XJT|-60|0|;Asia/Kathmandu|NPT|-5J|0|;Asia/Khandyga|VLAT VLAST VLAT YAKT YAKT|-a0 -b0 -b0 -a0 -90|010234|1BWg0 1qM0 WM0 17V0 7zD0;Asia/Krasnoyarsk|KRAT KRAST KRAT|-70 -80 -80|01020|1BWj0 1qM0 WM0 8Hz0;Asia/Kuala_Lumpur|MYT|-80|0|;Asia/Magadan|MAGT MAGST MAGT MAGT|-b0 -c0 -c0 -a0|01023|1BWf0 1qM0 WM0 8Hz0;Asia/Makassar|WITA|-80|0|;Asia/Manila|PHT|-80|0|;Asia/Nicosia|EET EEST|-20 -30|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Asia/Novokuznetsk|KRAT NOVST NOVT NOVT|-70 -70 -60 -70|01230|1BWj0 1qN0 WM0 8Hz0;Asia/Novosibirsk|NOVT NOVST NOVT|-60 -70 -70|01020|1BWk0 1qM0 WM0 8Hz0;Asia/Omsk|OMST OMSST OMST|-60 -70 -70|01020|1BWk0 1qM0 WM0 8Hz0;Asia/Oral|ORAT|-50|0|;Asia/Pyongyang|KST|-90|0|;Asia/Qyzylorda|QYZT|-60|0|;Asia/Rangoon|MMT|-6u|0|;Asia/Sakhalin|SAKT SAKST SAKT|-a0 -b0 -b0|01020|1BWg0 1qM0 WM0 8Hz0;Asia/Samarkand|UZT|-50|0|;Asia/Singapore|SGT|-80|0|;Asia/Srednekolymsk|MAGT MAGST MAGT SRET|-b0 -c0 -c0 -b0|01023|1BWf0 1qM0 WM0 8Hz0;Asia/Tbilisi|GET|-40|0|;Asia/Tehran|IRST IRDT|-3u -4u|01010101010101010101010|1BTUu 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0 1cN0 1dz0 1cp0 1dz0 1cp0 1dz0 1cp0 1dz0;Asia/Thimbu|BTT|-60|0|;Asia/Tokyo|JST|-90|0|;Asia/Ulaanbaatar|ULAT|-80|0|;Asia/Ust-Nera|MAGT MAGST MAGT VLAT VLAT|-b0 -c0 -c0 -b0 -a0|010234|1BWf0 1qM0 WM0 17V0 
7zD0;Asia/Vladivostok|VLAT VLAST VLAT|-a0 -b0 -b0|01020|1BWg0 1qM0 WM0 8Hz0;Asia/Yakutsk|YAKT YAKST YAKT|-90 -a0 -a0|01020|1BWh0 1qM0 WM0 8Hz0;Asia/Yekaterinburg|YEKT YEKST YEKT|-50 -60 -60|01020|1BWl0 1qM0 WM0 8Hz0;Asia/Yerevan|AMT AMST|-40 -50|01010|1BWm0 1qM0 WM0 1qM0;Atlantic/Azores|AZOT AZOST|10 0|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Atlantic/Canary|WET WEST|0 -10|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Atlantic/Cape_Verde|CVT|10|0|;Atlantic/South_Georgia|GST|20|0|;Atlantic/Stanley|FKST FKT|30 40|010|1C6R0 U10;Australia/ACT|AEDT AEST|-b0 -a0|01010101010101010101010|1C140 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0;Australia/Adelaide|ACDT ACST|-au -9u|01010101010101010101010|1C14u 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1cM0 1fA0 1cM0 1cM0 1cM0 1cM0;Australia/Brisbane|AEST|-a0|0|;Australia/Darwin|ACST|-9u|0|;Australia/Eucla|ACWST|-8J|0|;Australia/LHI|LHDT LHST|-b0 -au|01010101010101010101010|1C130 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1cMu 1cLu 1fAu 1cLu 1cMu 1cLu 1cMu;Australia/Perth|AWST|-80|0|;Chile/EasterIsland|EASST EAST|50 60|01010101010101010101010|1C1f0 1fB0 1nX0 G10 1EL0 Op0 1zb0 Rd0 1wn0 Rd0 1wn0 Rd0 1wn0 Rd0 1wn0 Rd0 1zb0 Op0 1zb0 Rd0 1wn0 Rd0;Eire|GMT IST|0 -10|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 
1o00;Etc/GMT+1|GMT+1|10|0|;Etc/GMT+10|GMT+10|a0|0|;Etc/GMT+11|GMT+11|b0|0|;Etc/GMT+12|GMT+12|c0|0|;Etc/GMT+2|GMT+2|20|0|;Etc/GMT+3|GMT+3|30|0|;Etc/GMT+4|GMT+4|40|0|;Etc/GMT+5|GMT+5|50|0|;Etc/GMT+6|GMT+6|60|0|;Etc/GMT+7|GMT+7|70|0|;Etc/GMT+8|GMT+8|80|0|;Etc/GMT+9|GMT+9|90|0|;Etc/GMT-1|GMT-1|-10|0|;Etc/GMT-10|GMT-10|-a0|0|;Etc/GMT-11|GMT-11|-b0|0|;Etc/GMT-12|GMT-12|-c0|0|;Etc/GMT-13|GMT-13|-d0|0|;Etc/GMT-14|GMT-14|-e0|0|;Etc/GMT-2|GMT-2|-20|0|;Etc/GMT-3|GMT-3|-30|0|;Etc/GMT-4|GMT-4|-40|0|;Etc/GMT-5|GMT-5|-50|0|;Etc/GMT-6|GMT-6|-60|0|;Etc/GMT-7|GMT-7|-70|0|;Etc/GMT-8|GMT-8|-80|0|;Etc/GMT-9|GMT-9|-90|0|;Etc/UCT|UCT|0|0|;Etc/UTC|UTC|0|0|;Europe/Belfast|GMT BST|0 -10|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;Europe/Kaliningrad|EET EEST FET|-20 -30 -30|01020|1BWo0 1qM0 WM0 8Hz0;Europe/Minsk|EET EEST FET MSK|-20 -30 -30 -30|01023|1BWo0 1qM0 WM0 8Hy0;Europe/Moscow|MSK MSD MSK|-30 -40 -40|01020|1BWn0 1qM0 WM0 8Hz0;Europe/Samara|SAMT SAMST SAMT|-40 -40 -30|0120|1BWm0 1qN0 WM0;Europe/Simferopol|EET EEST MSK MSK|-20 -30 -40 -30|01010101023|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11z0 1nW0;Europe/Volgograd|MSK MSK|-30 -40|01010|1BWn0 1qM0 WM0 8Hz0;HST|HST|a0|0|;Indian/Chagos|IOT|-60|0|;Indian/Christmas|CXT|-70|0|;Indian/Cocos|CCT|-6u|0|;Indian/Kerguelen|TFT|-50|0|;Indian/Mahe|SCT|-40|0|;Indian/Maldives|MVT|-50|0|;Indian/Mauritius|MUT|-40|0|;Indian/Reunion|RET|-40|0|;Kwajalein|MHT|-c0|0|;MET|MET MEST|-10 -20|01010101010101010101010|1BWp0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00 11A0 1o00 11A0 1qM0 WM0 1qM0 WM0 1qM0 11A0 1o00 11A0 1o00;NZ-CHAT|CHADT CHAST|-dJ -cJ|01010101010101010101010|1C120 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 1a00;Pacific/Apia|SST SDT WSDT WSST|b0 a0 -e0 -d0|01012323232323232323232|1Dbn0 1ff0 1a00 CI0 AQ0 1cM0 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1a00 1fA0 1cM0 1fA0 1a00 1fA0 
1a00;Pacific/Bougainville|PGT BST|-a0 -b0|01|1NwE0;Pacific/Chuuk|CHUT|-a0|0|;Pacific/Efate|VUT|-b0|0|;Pacific/Enderbury|PHOT|-d0|0|;Pacific/Fakaofo|TKT TKT|b0 -d0|01|1Gfn0;Pacific/Fiji|FJST FJT|-d0 -c0|01010101010101010101010|1BWe0 1o00 Rc0 1wo0 Ao0 1Nc0 Ao0 1Q00 xz0 1SN0 uM0 1SM0 xA0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0 uM0 1SM0;Pacific/Funafuti|TVT|-c0|0|;Pacific/Galapagos|GALT|60|0|;Pacific/Gambier|GAMT|90|0|;Pacific/Guadalcanal|SBT|-b0|0|;Pacific/Guam|ChST|-a0|0|;Pacific/Kiritimati|LINT|-e0|0|;Pacific/Kosrae|KOST|-b0|0|;Pacific/Marquesas|MART|9u|0|;Pacific/Midway|SST|b0|0|;Pacific/Nauru|NRT|-c0|0|;Pacific/Niue|NUT|b0|0|;Pacific/Norfolk|NFT|-bu|0|;Pacific/Noumea|NCT|-b0|0|;Pacific/Palau|PWT|-90|0|;Pacific/Pohnpei|PONT|-b0|0|;Pacific/Port_Moresby|PGT|-a0|0|;Pacific/Rarotonga|CKT|a0|0|;Pacific/Tahiti|TAHT|a0|0|;Pacific/Tarawa|GILT|-c0|0|;Pacific/Tongatapu|TOT|-d0|0|;Pacific/Wake|WAKT|-c0|0|;Pacific/Wallis|WFT|-c0|0|".split(";"),
links:"Africa/Abidjan|Africa/Accra Africa/Abidjan|Africa/Bamako Africa/Abidjan|Africa/Banjul Africa/Abidjan|Africa/Bissau Africa/Abidjan|Africa/Conakry Africa/Abidjan|Africa/Dakar Africa/Abidjan|Africa/Freetown Africa/Abidjan|Africa/Lome Africa/Abidjan|Africa/Monrovia Africa/Abidjan|Africa/Nouakchott Africa/Abidjan|Africa/Ouagadougou Africa/Abidjan|Africa/Sao_Tome Africa/Abidjan|Africa/Timbuktu Africa/Abidjan|America/Danmarkshavn Africa/Abidjan|Atlantic/Reykjavik Africa/Abidjan|Atlantic/St_Helena Africa/Abidjan|Etc/GMT Africa/Abidjan|Etc/GMT+0 Africa/Abidjan|Etc/GMT-0 Africa/Abidjan|Etc/GMT0 Africa/Abidjan|Etc/Greenwich Africa/Abidjan|GMT Africa/Abidjan|GMT+0 Africa/Abidjan|GMT-0 Africa/Abidjan|GMT0 Africa/Abidjan|Greenwich Africa/Abidjan|Iceland Africa/Addis_Ababa|Africa/Asmara Africa/Addis_Ababa|Africa/Asmera Africa/Addis_Ababa|Africa/Dar_es_Salaam Africa/Addis_Ababa|Africa/Djibouti Africa/Addis_Ababa|Africa/Juba Africa/Addis_Ababa|Africa/Kampala Africa/Addis_Ababa|Africa/Khartoum Africa/Addis_Ababa|Africa/Mogadishu Africa/Addis_Ababa|Africa/Nairobi Africa/Addis_Ababa|Indian/Antananarivo Africa/Addis_Ababa|Indian/Comoro Africa/Addis_Ababa|Indian/Mayotte Africa/Algiers|Africa/Tunis Africa/Bangui|Africa/Brazzaville Africa/Bangui|Africa/Douala Africa/Bangui|Africa/Kinshasa Africa/Bangui|Africa/Lagos Africa/Bangui|Africa/Libreville Africa/Bangui|Africa/Luanda Africa/Bangui|Africa/Malabo Africa/Bangui|Africa/Ndjamena Africa/Bangui|Africa/Niamey Africa/Bangui|Africa/Porto-Novo Africa/Blantyre|Africa/Bujumbura Africa/Blantyre|Africa/Gaborone Africa/Blantyre|Africa/Harare Africa/Blantyre|Africa/Kigali Africa/Blantyre|Africa/Lubumbashi Africa/Blantyre|Africa/Lusaka Africa/Blantyre|Africa/Maputo Africa/Cairo|Egypt Africa/Casablanca|Africa/El_Aaiun Africa/Ceuta|Arctic/Longyearbyen Africa/Ceuta|Atlantic/Jan_Mayen Africa/Ceuta|CET Africa/Ceuta|Europe/Amsterdam Africa/Ceuta|Europe/Andorra Africa/Ceuta|Europe/Belgrade Africa/Ceuta|Europe/Berlin Africa/Ceuta|Europe/Bratislava 
Africa/Ceuta|Europe/Brussels Africa/Ceuta|Europe/Budapest Africa/Ceuta|Europe/Busingen Africa/Ceuta|Europe/Copenhagen Africa/Ceuta|Europe/Gibraltar Africa/Ceuta|Europe/Ljubljana Africa/Ceuta|Europe/Luxembourg Africa/Ceuta|Europe/Madrid Africa/Ceuta|Europe/Malta Africa/Ceuta|Europe/Monaco Africa/Ceuta|Europe/Oslo Africa/Ceuta|Europe/Paris Africa/Ceuta|Europe/Podgorica Africa/Ceuta|Europe/Prague Africa/Ceuta|Europe/Rome Africa/Ceuta|Europe/San_Marino Africa/Ceuta|Europe/Sarajevo Africa/Ceuta|Europe/Skopje Africa/Ceuta|Europe/Stockholm Africa/Ceuta|Europe/Tirane Africa/Ceuta|Europe/Vaduz Africa/Ceuta|Europe/Vatican Africa/Ceuta|Europe/Vienna Africa/Ceuta|Europe/Warsaw Africa/Ceuta|Europe/Zagreb Africa/Ceuta|Europe/Zurich Africa/Ceuta|Poland Africa/Johannesburg|Africa/Maseru Africa/Johannesburg|Africa/Mbabane Africa/Tripoli|Libya America/Adak|America/Atka America/Adak|US/Aleutian America/Anchorage|America/Juneau America/Anchorage|America/Nome America/Anchorage|America/Sitka America/Anchorage|America/Yakutat America/Anchorage|US/Alaska America/Anguilla|America/Antigua America/Anguilla|America/Aruba America/Anguilla|America/Barbados America/Anguilla|America/Blanc-Sablon America/Anguilla|America/Curacao America/Anguilla|America/Dominica America/Anguilla|America/Grenada America/Anguilla|America/Guadeloupe America/Anguilla|America/Kralendijk America/Anguilla|America/Lower_Princes America/Anguilla|America/Marigot America/Anguilla|America/Martinique America/Anguilla|America/Montserrat America/Anguilla|America/Port_of_Spain America/Anguilla|America/Puerto_Rico America/Anguilla|America/Santo_Domingo America/Anguilla|America/St_Barthelemy America/Anguilla|America/St_Kitts America/Anguilla|America/St_Lucia America/Anguilla|America/St_Thomas America/Anguilla|America/St_Vincent America/Anguilla|America/Tortola America/Anguilla|America/Virgin America/Argentina/Buenos_Aires|America/Argentina/Catamarca America/Argentina/Buenos_Aires|America/Argentina/ComodRivadavia 
America/Argentina/Buenos_Aires|America/Argentina/Cordoba America/Argentina/Buenos_Aires|America/Argentina/Jujuy America/Argentina/Buenos_Aires|America/Argentina/La_Rioja America/Argentina/Buenos_Aires|America/Argentina/Mendoza America/Argentina/Buenos_Aires|America/Argentina/Rio_Gallegos America/Argentina/Buenos_Aires|America/Argentina/Salta America/Argentina/Buenos_Aires|America/Argentina/San_Juan America/Argentina/Buenos_Aires|America/Argentina/San_Luis America/Argentina/Buenos_Aires|America/Argentina/Tucuman America/Argentina/Buenos_Aires|America/Argentina/Ushuaia America/Argentina/Buenos_Aires|America/Buenos_Aires America/Argentina/Buenos_Aires|America/Catamarca America/Argentina/Buenos_Aires|America/Cordoba America/Argentina/Buenos_Aires|America/Jujuy America/Argentina/Buenos_Aires|America/Mendoza America/Argentina/Buenos_Aires|America/Rosario America/Atikokan|America/Cayman America/Atikokan|America/Coral_Harbour America/Atikokan|America/Jamaica America/Atikokan|America/Panama America/Atikokan|EST America/Atikokan|Jamaica America/Belem|America/Fortaleza America/Belem|America/Maceio America/Belem|America/Recife America/Belem|America/Santarem America/Belize|America/Costa_Rica America/Belize|America/El_Salvador America/Belize|America/Guatemala America/Belize|America/Managua America/Belize|America/Regina America/Belize|America/Swift_Current America/Belize|America/Tegucigalpa America/Belize|Canada/East-Saskatchewan America/Belize|Canada/Saskatchewan America/Boa_Vista|America/Manaus America/Boa_Vista|America/Porto_Velho America/Boa_Vista|Brazil/West America/Boise|America/Cambridge_Bay America/Boise|America/Denver America/Boise|America/Edmonton America/Boise|America/Inuvik America/Boise|America/Ojinaga America/Boise|America/Shiprock America/Boise|America/Yellowknife America/Boise|Canada/Mountain America/Boise|MST7MDT America/Boise|Navajo America/Boise|US/Mountain America/Campo_Grande|America/Cuiaba America/Cancun|America/Merida America/Cancun|America/Mexico_City 
America/Cancun|America/Monterrey America/Cancun|Mexico/General America/Chicago|America/Indiana/Knox America/Chicago|America/Indiana/Tell_City America/Chicago|America/Knox_IN America/Chicago|America/Matamoros America/Chicago|America/Menominee America/Chicago|America/North_Dakota/Center America/Chicago|America/North_Dakota/New_Salem America/Chicago|America/Rainy_River America/Chicago|America/Rankin_Inlet America/Chicago|America/Resolute America/Chicago|America/Winnipeg America/Chicago|CST6CDT America/Chicago|Canada/Central America/Chicago|US/Central America/Chicago|US/Indiana-Starke America/Chihuahua|America/Mazatlan America/Chihuahua|Mexico/BajaSur America/Creston|America/Dawson_Creek America/Creston|America/Hermosillo America/Creston|America/Phoenix America/Creston|MST America/Creston|US/Arizona America/Dawson|America/Ensenada America/Dawson|America/Los_Angeles America/Dawson|America/Tijuana America/Dawson|America/Vancouver America/Dawson|America/Whitehorse America/Dawson|Canada/Pacific America/Dawson|Canada/Yukon America/Dawson|Mexico/BajaNorte America/Dawson|PST8PDT America/Dawson|US/Pacific America/Dawson|US/Pacific-New America/Detroit|America/Fort_Wayne America/Detroit|America/Indiana/Indianapolis America/Detroit|America/Indiana/Marengo America/Detroit|America/Indiana/Petersburg America/Detroit|America/Indiana/Vevay America/Detroit|America/Indiana/Vincennes America/Detroit|America/Indiana/Winamac America/Detroit|America/Indianapolis America/Detroit|America/Iqaluit America/Detroit|America/Kentucky/Louisville America/Detroit|America/Kentucky/Monticello America/Detroit|America/Louisville America/Detroit|America/Montreal America/Detroit|America/Nassau America/Detroit|America/New_York America/Detroit|America/Nipigon America/Detroit|America/Pangnirtung America/Detroit|America/Thunder_Bay America/Detroit|America/Toronto America/Detroit|Canada/Eastern America/Detroit|EST5EDT America/Detroit|US/East-Indiana America/Detroit|US/Eastern America/Detroit|US/Michigan 
America/Eirunepe|America/Porto_Acre America/Eirunepe|America/Rio_Branco America/Eirunepe|Brazil/Acre America/Glace_Bay|America/Halifax America/Glace_Bay|America/Moncton America/Glace_Bay|America/Thule America/Glace_Bay|Atlantic/Bermuda America/Glace_Bay|Canada/Atlantic America/Havana|Cuba America/Metlakatla|Pacific/Pitcairn America/Noronha|Brazil/DeNoronha America/Santiago|Antarctica/Palmer America/Santiago|Chile/Continental America/Sao_Paulo|Brazil/East America/St_Johns|Canada/Newfoundland Antarctica/McMurdo|Antarctica/South_Pole Antarctica/McMurdo|NZ Antarctica/McMurdo|Pacific/Auckland Asia/Aden|Asia/Baghdad Asia/Aden|Asia/Bahrain Asia/Aden|Asia/Kuwait Asia/Aden|Asia/Qatar Asia/Aden|Asia/Riyadh Asia/Aqtau|Asia/Aqtobe Asia/Ashgabat|Asia/Ashkhabad Asia/Bangkok|Asia/Ho_Chi_Minh Asia/Bangkok|Asia/Phnom_Penh Asia/Bangkok|Asia/Saigon Asia/Bangkok|Asia/Vientiane Asia/Calcutta|Asia/Colombo Asia/Calcutta|Asia/Kolkata Asia/Chongqing|Asia/Chungking Asia/Chongqing|Asia/Harbin Asia/Chongqing|Asia/Macao Asia/Chongqing|Asia/Macau Asia/Chongqing|Asia/Shanghai Asia/Chongqing|Asia/Taipei Asia/Chongqing|PRC Asia/Chongqing|ROC Asia/Dacca|Asia/Dhaka Asia/Dubai|Asia/Muscat Asia/Hong_Kong|Hongkong Asia/Istanbul|Europe/Istanbul Asia/Istanbul|Turkey Asia/Jakarta|Asia/Pontianak Asia/Jerusalem|Asia/Tel_Aviv Asia/Jerusalem|Israel Asia/Kashgar|Asia/Urumqi Asia/Kathmandu|Asia/Katmandu Asia/Kuala_Lumpur|Asia/Kuching Asia/Makassar|Asia/Ujung_Pandang Asia/Nicosia|EET Asia/Nicosia|Europe/Athens Asia/Nicosia|Europe/Bucharest Asia/Nicosia|Europe/Chisinau Asia/Nicosia|Europe/Helsinki Asia/Nicosia|Europe/Kiev Asia/Nicosia|Europe/Mariehamn Asia/Nicosia|Europe/Nicosia Asia/Nicosia|Europe/Riga Asia/Nicosia|Europe/Sofia Asia/Nicosia|Europe/Tallinn Asia/Nicosia|Europe/Tiraspol Asia/Nicosia|Europe/Uzhgorod Asia/Nicosia|Europe/Vilnius Asia/Nicosia|Europe/Zaporozhye Asia/Pyongyang|Asia/Seoul Asia/Pyongyang|ROK Asia/Samarkand|Asia/Tashkent Asia/Singapore|Singapore Asia/Tehran|Iran Asia/Thimbu|Asia/Thimphu 
Asia/Tokyo|Japan Asia/Ulaanbaatar|Asia/Ulan_Bator Atlantic/Canary|Atlantic/Faeroe Atlantic/Canary|Atlantic/Faroe Atlantic/Canary|Atlantic/Madeira Atlantic/Canary|Europe/Lisbon Atlantic/Canary|Portugal Atlantic/Canary|WET Australia/ACT|Australia/Canberra Australia/ACT|Australia/Currie Australia/ACT|Australia/Hobart Australia/ACT|Australia/Melbourne Australia/ACT|Australia/NSW Australia/ACT|Australia/Sydney Australia/ACT|Australia/Tasmania Australia/ACT|Australia/Victoria Australia/Adelaide|Australia/Broken_Hill Australia/Adelaide|Australia/South Australia/Adelaide|Australia/Yancowinna Australia/Brisbane|Australia/Lindeman Australia/Brisbane|Australia/Queensland Australia/Darwin|Australia/North Australia/LHI|Australia/Lord_Howe Australia/Perth|Australia/West Chile/EasterIsland|Pacific/Easter Eire|Europe/Dublin Etc/UCT|UCT Etc/UTC|Etc/Universal Etc/UTC|Etc/Zulu Etc/UTC|UTC Etc/UTC|Universal Etc/UTC|Zulu Europe/Belfast|Europe/Guernsey Europe/Belfast|Europe/Isle_of_Man Europe/Belfast|Europe/Jersey Europe/Belfast|Europe/London Europe/Belfast|GB Europe/Belfast|GB-Eire Europe/Moscow|W-SU HST|Pacific/Honolulu HST|Pacific/Johnston HST|US/Hawaii Kwajalein|Pacific/Kwajalein Kwajalein|Pacific/Majuro NZ-CHAT|Pacific/Chatham Pacific/Chuuk|Pacific/Truk Pacific/Chuuk|Pacific/Yap Pacific/Guam|Pacific/Saipan Pacific/Midway|Pacific/Pago_Pago Pacific/Midway|Pacific/Samoa Pacific/Midway|US/Samoa Pacific/Pohnpei|Pacific/Ponape".split(" ")});
return d});
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#!/usr/bin/python
# -*- coding: utf-8 -*-
import Queue
import time
from concurrent.futures import ThreadPoolExecutor
def foo(i):
    # Worker task: simulate two seconds of work, then return the
    # transformed input (input + 100).
    time.sleep(2)
    print '>>>>time---', time.ctime()
    return i + 100
def bar(arg):
    # Completion callback: report a finished task's result and time.
    # NOTE(review): never registered anywhere below -- presumably meant
    # for Future.add_done_callback; confirm intent.
    print '----exec done:', arg, time.ctime()
if __name__ == '__main__':
resultList = list()
sendList = list()
queue = Queue.Queue()
futureList = list()
executor = ThreadPoolExecutor(5)
begin = time.time()
for i in range(4):
#future = executor.submit(foo, i)
sendList.append(i)
future = executor.map(foo, sendList)
# for i in range(4):
# print ">>>>>future next>>>", future.next()
# print ">>>>>future next1>>>", future.next()
# print ">>>>>future next2>>>", future.next()
# print ">>>>>future next3>>>", future.next()
# print ">>>>>future next4>>>", future.next()
for fu in futureList:
# print ">>>>fun done()>>>>", fu.done()
print ">>>>result>>>>>>>>", fu.result()
usedTime = time.time() - begin
print "used time: ", usedTime
#print queue.qsize()
for i in resultList:
print "result: ", i.get()
|
class BotClassifier(object):
    """Skeleton of a bot classifier; the pipeline steps are stubbed out."""

    def __init__(self, trainData, method='tf-idf'):
        """Store the feature-extraction method name.

        The training data is accepted but not yet used -- the original
        extraction of tweets/labels is left commented out.
        """
        #self.tweets, self.bot = trainData['text'], trainData['bot']
        self.method = method

    def tokenize(self):
        """Stub tokenization step; currently just announces itself."""
        print('Im doing tokenize')

    def train(self):
        """Run the (stubbed) training pipeline: tokenize, then fit."""
        self.tokenize()
        print('im doing training!')
# Test the implementation of the BotClassifier Class
# Smoke test: construct a classifier and exercise the stubbed training path.
bot3000 = BotClassifier('train', 'tf-idf')
bot3000.train()
|
from compass import s1_rdr2geo, s1_geo2rdr, s1_resample, s1_geocode_slc
from compass.utils.geo_runconfig import GeoRunConfig
from compass.utils.runconfig import RunConfig
from compass.utils.yaml_argparse import YamlArgparse
def run(run_config_path: str, grid_type: str):
    """
    Run CSLC with user-defined options.

    Parameters
    ----------
    run_config_path: str
        File path with user-defined options in yaml format
    grid_type: str
        Grid type of the output CSLC; either 'radar' or 'geo'

    Raises
    ------
    ValueError
        If grid_type is neither 'radar' nor 'geo'
    """
    if grid_type == 'radar':
        # CSLC workflow in radar coordinates
        # get a runconfig dict from command line args
        cfg = RunConfig.load_from_yaml(run_config_path, 's1_cslc_radar')
        if cfg.is_reference:
            # reference burst - run rdr2geo and archive it
            s1_rdr2geo.run(cfg)
        else:
            # secondary burst - run geo2rdr + resample
            s1_geo2rdr.run(cfg)
            s1_resample.run(cfg)
    elif grid_type == 'geo':
        # CSLC workflow in geo-coordinates
        # get a runconfig dict from command line arguments
        cfg = GeoRunConfig.load_from_yaml(run_config_path, 's1_cslc_geo')
        # run geocode_slc
        s1_geocode_slc.run(cfg)
    else:
        # Previously an unrecognized grid type silently did nothing;
        # fail loudly instead.
        raise ValueError(
            f"grid_type must be 'radar' or 'geo', got {grid_type!r}")
def main():
    """Parse command-line arguments and dispatch the CSLC workflow."""
    cli = YamlArgparse(add_grid_type=True)
    run(cli.run_config_path, cli.args.grid_type)
if __name__ == "__main__":
    # Script entry point.
    '''run s1_cslc from command line'''
    main()
|
const runeURL = "https://rune-registry.web.app/registry/hotg-ai/mobilenet_v2_1/rune.rune";
var runtime;
let input;
let output;
//create capability and output classes
// Host-side capability: feeds camera pixels to the Rune and records the
// parameters the Rune asks for.
class ImageCapability {
    parameters = {};

    // Copy the current frame (module-global `input`) into the buffer the
    // runtime hands us.  `capabilityId` is unused here.
    generate(target, capabilityId) {
        target.set(input, 0);
    }

    // Remember a parameter requested by the Rune (e.g. width/height).
    setParameter(key, value) {
        Object.assign(this.parameters, { [key]: value });
    }
}
// Host-side output sink: decodes the Rune's UTF-8 JSON payload and
// publishes it via the module-global `output`.
class SerialOutput {
    consume(data) {
        const decoded = new TextDecoder().decode(data);
        output = JSON.parse(decoded);
    }
}
// Single shared capability so its parameters can be shown after load.
const imageCap = new ImageCapability();
// Host functions handed to the Rune runtime; `rune` is the runtime
// library loaded globally by the page (assumed present -- TODO confirm).
const imports = {
    createCapability: () => imageCap,
    createOutput: () => new SerialOutput(),
    createModel: (mime, model_data) => rune.TensorFlowModel.loadTensorFlowLite(model_data),
    log: (log) => { console.log(log) },
};
// Fetch the Rune binary, instantiate the runtime with our host imports,
// reveal the UI and start the camera preview.
async function loadRune() {
    document.getElementById("loader").style.visibility = "visible";
    const response = await fetch(runeURL);
    const bytes = new Uint8Array(await response.arrayBuffer());
    runtime = await rune.Runtime.load(bytes.buffer,imports);
    // Show the parameters the Rune requested (populated during load).
    document.getElementById("log").innerHTML=JSON.stringify(imageCap.parameters);
    document.getElementById("rune").style.visibility = "visible";
    document.getElementById("loader").style.visibility = "hidden";
    startCamera();
}
// Grab the current video frame, run one inference pass and render the
// JSON-serialised result.
async function runRune() {
    //get input and resize
    let video = document.getElementById("video");
    // Resize the frame to 224px into the module-global `input`, which
    // ImageCapability.generate() feeds to the runtime.
    input = rune.TensorFlowModel.resizeImage(video,224);
    runtime.call();
    //get output and convert to image
    // `output` was populated by SerialOutput.consume() during call().
    const result = JSON.stringify(output.elements);
    console.log(result);
    document.getElementById('result').innerHTML=result;
    document.getElementById("loader").style.visibility = "hidden";
}
//image and video functions
// Hide the run UI until the Rune has finished loading.
document.getElementById("rune").style.visibility = "hidden";
let video = document.getElementById("video");
let click_button = document.getElementById("click-photo");
let canvas = document.getElementById("capture");

// Start the webcam stream and attach it to the <video> element.
async function startCamera() {
    let stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: false });
    // playsinline keeps mobile Safari from forcing fullscreen playback.
    video.setAttribute("playsinline", true);
    video.srcObject = stream;
}

// Kick off loading as soon as the page is ready.
window.onload = function() {
    loadRune();
};
document.getElementById("loader").style.visibility = "hidden";
|
#!/usr/bin/python
import sys
# Parse the sampling window [begin, end] and the peak threshold from argv.
begin = int(sys.argv[1])
end = int(sys.argv[2])
print "between %i and %i" % (begin, end)
threshold = float(sys.argv[3])
print "averaging over %f" % (threshold)
def avg_peak(values, limit=None):
    """Return the mean of the values strictly above a threshold.

    values -- iterable of numeric samples.
    limit  -- threshold a value must exceed to be counted; defaults to
              the module-level ``threshold`` parsed from the command
              line, preserving the original behaviour.

    Returns 0 when no value exceeds the threshold (including for an
    empty input).
    """
    if limit is None:
        limit = threshold
    # `total` instead of `sum` so the builtin is not shadowed.
    total = 0
    count = 0
    for value in values:
        if value > limit:
            total += value
            count += 1
    if count == 0:
        return 0
    return total / count
# Buckets of sample values; a new bucket starts each time the input
# moves past `end`, so repeated/wrapping timestamp ranges each get one.
results = []
index = 0
results.append([])
for line in sys.stdin:
    values = line.split()
    if len(values) == 2:
        if not values[0].isdigit():
            # Skip header / malformed rows whose first column is not a time.
            continue
        time = int(values[0])
        if time >= begin and time <= end:
            #print values
            value = float(values[1])
            results[index].append(value)
        # Close the current bucket once we see a timestamp past the
        # window (only if the bucket collected anything).
        if time > end and len(results[index]) != 0:
            #print time
            index = index + 1
            results.append([])
# Drop the last (possibly still-open) bucket and print one thresholded
# average per completed bucket, tab-separated.
print "\t".join(map(lambda (values): str(avg_peak(values)), results[:-1]))
|
//
// SUUIBasedUpdateDriver.h
// Sparkle
//
// Created by Andy Matuschak on 5/5/08.
// Copyright 2008 Andy Matuschak. All rights reserved.
//
#ifndef SUUIBASEDUPDATEDRIVER_H
#define SUUIBASEDUPDATEDRIVER_H
#import <Cocoa/Cocoa.h>
#import "SUBasicUpdateDriver.h"
#import "SUUpdateAlert.h"
@class SUStatusController;
// Update driver that layers Sparkle's standard UI on top of the basic
// download/install flow (SUBasicUpdateDriver); also acts as the
// unarchiver's delegate.
@interface SUUIBasedUpdateDriver : SUBasicUpdateDriver <SUUnarchiverDelegate>
// Present an alert modally during the update flow.
- (void)showModalAlert:(NSAlert *)alert;
// User action: abort an in-progress download.
- (IBAction)cancelDownload:(id)sender;
// User action: install the downloaded update and relaunch the app.
- (void)installAndRestart:(id)sender;
@end
#endif
|
# Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import datetime
import ddt
import functools
from urllib import parse as urlparse
import fixtures
import iso8601
import mock
from oslo_policy import policy as oslo_policy
from oslo_serialization import base64
from oslo_serialization import jsonutils
from oslo_utils import encodeutils
from oslo_utils import fixture as utils_fixture
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from oslo_utils import uuidutils
import testtools
import webob
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack import compute
from nova.api.openstack.compute import ips
from nova.api.openstack.compute.schemas import servers as servers_schema
from nova.api.openstack.compute import servers
from nova.api.openstack.compute import views
from nova.api.openstack import wsgi as os_wsgi
from nova import availability_zones
from nova import block_device
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova import context
from nova.db import api as db
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import models
from nova import exception
from nova.image import glance
from nova import objects
from nova.objects import instance as instance_obj
from nova.objects.instance_group import InstanceGroup
from nova.objects import tag
from nova.policies import servers as server_policies
from nova import policy
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit import matchers
from nova import utils as nova_utils
CONF = nova.conf.CONF
FAKE_UUID = fakes.FAKE_UUID
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
# Maps instance uuid -> integer db id; used by the instance_update* stubs
# below to look up the stub instance they should pretend to update.
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
# Extended server attributes that are only returned by GET (show/detail)
# responses, not by create/update responses.
GET_ONLY_FIELDS = ['OS-EXT-AZ:availability_zone', 'config_drive',
                   'OS-EXT-SRV-ATTR:host',
                   'OS-EXT-SRV-ATTR:hypervisor_hostname',
                   'OS-EXT-SRV-ATTR:instance_name',
                   'OS-EXT-SRV-ATTR:hostname',
                   'OS-EXT-SRV-ATTR:kernel_id',
                   'OS-EXT-SRV-ATTR:launch_index',
                   'OS-EXT-SRV-ATTR:ramdisk_id',
                   'OS-EXT-SRV-ATTR:reservation_id',
                   'OS-EXT-SRV-ATTR:root_device_name',
                   'OS-EXT-SRV-ATTR:user_data', 'host_status',
                   'key_name', 'OS-SRV-USG:launched_at',
                   'OS-SRV-USG:terminated_at',
                   'OS-EXT-STS:task_state', 'OS-EXT-STS:vm_state',
                   'OS-EXT-STS:power_state', 'security_groups',
                   'os-extended-volumes:volumes_attached']
def instance_update_and_get_original(context, instance_uuid, values,
                                     columns_to_join=None,
                                     ):
    """Stub for the db API: merge *values* over a stub instance.

    Returns a (old, new) pair as the real API does; here both entries are
    the same merged dict since the stub tracks no history.
    """
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    updated = dict(stub, **values)
    return (updated, updated)
def instance_update(context, instance_uuid, values):
    """Stub for the db API: return a stub instance merged with *values*."""
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    return dict(stub, **values)
def fake_compute_api(cls, req, id):
    """Stub compute API action handler that always reports success."""
    return True
def fake_start_stop_not_ready(self, context, instance):
    # Stub start/stop handler that simulates an instance not yet ready.
    raise exception.InstanceNotReady(instance_id=instance["uuid"])
def fake_start_stop_invalid_state(self, context, instance):
    # Stub start/stop handler that simulates an invalid-state failure.
    raise exception.InstanceInvalidState(
        instance_uuid=instance['uuid'], attr='fake_attr',
        method='fake_method', state='fake_state')
def fake_instance_get_by_uuid_not_found(context, uuid,
                                        columns_to_join, use_slave=False):
    # Stub db lookup that always reports the instance as missing.
    raise exception.InstanceNotFound(instance_id=uuid)
def fake_instance_get_all_with_locked(context, list_locked, **kwargs):
    """Build an InstanceList whose locked_by values follow *list_locked*.

    Each entry of *list_locked* yields one stub instance; the sentinel
    value 'not_locked' maps to locked_by=None.
    """
    instances = []
    for idx, locked in enumerate(list_locked, start=1):
        kwargs['locked_by'] = None if locked == 'not_locked' else locked
        instances.append(fakes.stub_instance_obj(
            context, id=idx, uuid=fakes.get_fake_uuid(locked), **kwargs))
    return objects.InstanceList(objects=instances)
def fake_instance_get_all_with_description(context, list_desc, **kwargs):
    """Build an InstanceList whose display descriptions follow *list_desc*."""
    instances = []
    for idx, desc in enumerate(list_desc, start=1):
        kwargs['display_description'] = desc
        instances.append(fakes.stub_instance_obj(
            context, id=idx, uuid=fakes.get_fake_uuid(desc), **kwargs))
    return objects.InstanceList(objects=instances)
def fake_compute_get_empty_az(*args, **kwargs):
    # Stub compute_api.get returning an ACTIVE instance whose
    # availability_zone is the empty string; args[1] is the uuid argument.
    inst = fakes.stub_instance(vm_state=vm_states.ACTIVE,
                               availability_zone='')
    return fake_instance.fake_instance_obj(args[1], **inst)
def fake_bdms_get_all_by_instance_uuids(*args, **kwargs):
    """Return two stub volume BDMs attached to FAKE_UUID."""
    def _bdm(bdm_id, volume_id, delete_on_termination):
        # Both entries differ only in id/volume_id/delete flag.
        return fake_block_device.FakeDbBlockDeviceDict({
            'id': bdm_id,
            'volume_id': volume_id,
            'instance_uuid': FAKE_UUID,
            'source_type': 'volume',
            'destination_type': 'volume',
            'delete_on_termination': delete_on_termination,
        })
    return [_bdm(1, 'some_volume_1', True),
            _bdm(2, 'some_volume_2', False)]
def fake_get_inst_mappings_by_instance_uuids_from_db(*args, **kwargs):
    # Stub InstanceMappingList db fetch: one mapping placing UUID1 in an
    # enabled cell named 'fake'.
    return [{
        'id': 1,
        'instance_uuid': UUID1,
        'cell_mapping': {
            'id': 1, 'uuid': uuids.cell1, 'name': 'fake',
            'transport_url': 'fake://nowhere/', 'updated_at': None,
            'database_connection': uuids.cell1, 'created_at': None,
            'disabled': False},
        'project_id': 'fake-project'
    }]
class MockSetAdminPassword(object):
    """Callable stub that records the last set-admin-password request."""

    def __init__(self):
        # Nothing recorded until the stub is invoked.
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance_id, password):
        # Capture the arguments so a test can assert on them afterwards.
        self.instance_id, self.password = instance_id, password
class ControllerTest(test.TestCase):
    """Common fixture setup for servers API controller tests.

    Stubs out networking, keypairs, glance and security groups, and mocks
    compute_api get/get_all so subclasses only tweak side effects.
    """
    project_id = fakes.FAKE_PROJECT_ID
    # Pre-built URL templates used by the request helpers in subclasses.
    path = '/%s/servers' % project_id
    path_v2 = '/v2' + path
    path_with_id = path + '/%s'
    path_with_id_v2 = path_v2 + '/%s'
    path_with_query = path + '?%s'
    path_detail = path + '/detail'
    path_detail_v2 = path_v2 + '/detail'
    path_detail_with_query = path_detail + '?%s'
    path_action = path + '/%s/action'
    def setUp(self):
        super(ControllerTest, self).setUp()
        fakes.stub_out_nw_api(self)
        fakes.stub_out_key_pair_funcs(self)
        self.useFixture(nova_fixtures.GlanceFixture(self))
        fakes.stub_out_secgroup_api(
            self, security_groups=[{'name': 'default'}])
        return_server = fakes.fake_compute_get(id=2, availability_zone='nova',
                                               launched_at=None,
                                               terminated_at=None,
                                               task_state=None,
                                               vm_state=vm_states.ACTIVE,
                                               power_state=1)
        return_servers = fakes.fake_compute_get_all()
        # Server sort keys extension is enabled in v21 so sort data is passed
        # to the instance API and the sorted DB API is invoked
        self.mock_get_all = self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get_all', side_effect=return_servers)).mock
        self.mock_get = self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get', side_effect=return_server)).mock
        self.stub_out('nova.db.api.instance_update_and_get_original',
                      instance_update_and_get_original)
        self.stub_out('nova.db.api.'
                      'block_device_mapping_get_all_by_instance_uuids',
                      fake_bdms_get_all_by_instance_uuids)
        self.stub_out('nova.objects.InstanceMappingList.'
                      '_get_by_instance_uuids_from_db',
                      fake_get_inst_mappings_by_instance_uuids_from_db)
        self.flags(group='glance', api_servers=['http://localhost:9292'])
        self.controller = servers.ServersController()
        self.ips_controller = ips.IPsController()
        # Assume that anything that hits the compute API and looks for a
        # RequestSpec doesn't care about it, since testing logic that deep
        # should be done in nova.tests.unit.compute.test_api.
        mock_reqspec = mock.patch('nova.objects.RequestSpec')
        mock_reqspec.start()
        self.addCleanup(mock_reqspec.stop)
        # Similarly we shouldn't care about anything hitting conductor from
        # these tests.
        mock_conductor = mock.patch.object(
            self.controller.compute_api, 'compute_task_api')
        mock_conductor.start()
        self.addCleanup(mock_conductor.stop)
class ServersControllerTest(ControllerTest):
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
    def setUp(self):
        """Re-point mock_get at a project-scoped stub server for this req."""
        super(ServersControllerTest, self).setUp()
        self.request = fakes.HTTPRequest.blank(
            self.path_with_id_v2 % FAKE_UUID,
            use_admin_context=False,
            version=self.wsgi_api_version)
        return_server = fakes.fake_compute_get(
            id=2, availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1,
            project_id=self.request.environ['nova.context'].project_id)
        self.mock_get.side_effect = return_server
    def req(self, url, use_admin_context=False):
        """Build a blank request at the test's API microversion."""
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)
    @mock.patch('nova.objects.Instance.get_by_uuid')
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_instance_lookup_targets(self, mock_get_im, mock_get_inst):
        """_get_instance targets the context at the instance's cell db."""
        ctxt = context.RequestContext('fake', self.project_id)
        mock_get_im.return_value.cell_mapping.database_connection = uuids.cell1
        self.controller._get_instance(ctxt, 'foo')
        mock_get_im.assert_called_once_with(ctxt, 'foo')
        # Targeting the cell should have set a db_connection on the context.
        self.assertIsNotNone(ctxt.db_connection)
    def test_requested_networks_prefix(self):
        """Tests that we no longer support the legacy br-<uuid> format for
        a network id.
        """
        uuid = 'br-00000000-0000-0000-0000-000000000000'
        requested_networks = [{'uuid': uuid}]
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self.controller._get_requested_networks,
                               requested_networks)
        self.assertIn('Bad networks format: network uuid is not in proper '
                      'format', str(ex))
    def test_requested_networks_enabled_with_port(self):
        """A port-only request maps to a (net, ip, port, tag) tuple."""
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())
    def test_requested_networks_enabled_with_network(self):
        """A network-only request maps to a (net, ip, port, tag) tuple."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(network, None, None, None)], res.as_tuples())
    def test_requested_networks_enabled_with_network_and_port(self):
        """When both are given, the port wins and the network is dropped."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())
    def test_requested_networks_with_and_duplicate_networks(self):
        """Duplicate network requests are preserved, not deduplicated."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}, {'uuid': network}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(network, None, None, None),
                          (network, None, None, None)], res.as_tuples())
    def test_requested_networks_enabled_conflict_on_fixed_ip(self):
        """Supplying both a fixed_ip and a port is rejected as a 400."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        addr = '10.0.0.1'
        requested_networks = [{'uuid': network,
                               'fixed_ip': addr,
                               'port': port}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)
    def test_requested_networks_api_enabled_with_v2_subclass(self):
        """Same port-over-network behavior holds for v2 subclasses."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())
    def test_get_server_by_uuid(self):
        """show() returns the server body keyed by the requested UUID."""
        res_dict = self.controller.show(self.request, FAKE_UUID)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
    def test_get_server_joins(self):
        """show() requests the expected joined attrs from compute_api.get."""
        def fake_get(*args, **kwargs):
            expected_attrs = kwargs['expected_attrs']
            self.assertEqual(['flavor', 'info_cache', 'metadata',
                              'numa_topology'], expected_attrs)
            ctxt = context.RequestContext('fake', self.project_id)
            return fake_instance.fake_instance_obj(
                ctxt, expected_attrs=expected_attrs,
                project_id=self.request.environ['nova.context'].project_id)
        self.mock_get.side_effect = fake_get
        self.controller.show(self.request, FAKE_UUID)
    def test_unique_host_id(self):
        """Create two servers with the same host and different
        project_ids and check that the host_id's are unique.
        """
        def return_instance_with_host(context, *args, **kwargs):
            # Same host for both calls; only the project differs.
            project_id = context.project_id
            return fakes.stub_instance_obj(context, id=1, uuid=FAKE_UUID,
                                           project_id=project_id,
                                           host='fake_host')
        req1 = self.req(self.path_with_id % FAKE_UUID)
        project_id = uuidutils.generate_uuid()
        req2 = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID,
                                       version=self.wsgi_api_version,
                                       project_id=project_id)
        self.mock_get.side_effect = return_instance_with_host
        server1 = self.controller.show(req1, FAKE_UUID)
        server2 = self.controller.show(req2, FAKE_UUID)
        # hostId is hashed with the project id, so it must differ per tenant.
        self.assertNotEqual(server1['server']['hostId'],
                            server2['server']['hostId'])
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Build the full expected API response body for the stub server.

        Callers typically override 'tenant_id' (and sometimes other keys)
        after the fact to match the request context.
        """
        return {
            "server": {
                "id": uuid,
                "user_id": "fake_user",
                "created": "2010-10-10T12:00:00Z",
                "updated": "2010-11-11T11:00:00Z",
                "progress": progress,
                "name": "server2",
                "status": status,
                "hostId": '',
                "image": {
                    "id": FAKE_UUID,
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "2",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {
                    "seq": "2",
                },
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost%s/%s" % (self.path_v2, uuid),
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost%s/%s" % (self.path, uuid),
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000002",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'default'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ],
                "tenant_id": self.request.environ['nova.context'].project_id
            }
        }
    def test_get_server_by_id(self):
        """show() returns the complete expected body for a stub server."""
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        uuid = FAKE_UUID
        res_dict = self.controller.show(self.request, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        expected_server['server']['tenant_id'] = self.request.environ[
            'nova.context'].project_id
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_empty_az(self):
        """An empty availability zone is passed through unchanged."""
        uuid = FAKE_UUID
        req = self.req(self.path_with_id_v2 % uuid)
        self.mock_get.side_effect = fakes.fake_compute_get(
            availability_zone='',
            project_id=req.environ['nova.context'].project_id)
        res_dict = self.controller.show(req, uuid)
        self.assertEqual(res_dict['server']['OS-EXT-AZ:availability_zone'], '')
    def test_get_server_with_active_status_by_id(self):
        """show() of an ACTIVE server returns the full expected body."""
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        res_dict = self.controller.show(self.request, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        expected_server['server']['tenant_id'] = self.request.environ[
            'nova.context'].project_id
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        # Verify the controller asked compute for the joined attrs it needs.
        self.mock_get.assert_called_once_with(
            self.request.environ['nova.context'], FAKE_UUID,
            expected_attrs=['flavor', 'info_cache', 'metadata',
                            'numa_topology'], cell_down_support=False)
    def test_get_server_with_id_image_ref_by_id(self):
        """show() renders the image ref as a bookmark link in the body."""
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        res_dict = self.controller.show(self.request, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        expected_server['server']['tenant_id'] = self.request.environ[
            'nova.context'].project_id
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        self.mock_get.assert_called_once_with(
            self.request.environ['nova.context'], FAKE_UUID,
            expected_attrs=['flavor', 'info_cache', 'metadata',
                            'numa_topology'], cell_down_support=False)
    def _generate_nw_cache_info(self):
        """Build a two-VIF network info cache: 'public' and 'private'.

        The public network carries two IPv4 subnets and one IPv6 subnet;
        the private network carries a single IPv4 subnet.
        """
        pub0 = ('172.19.0.1', '172.19.0.2',)
        pub1 = ('1.2.3.4',)
        pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
        priv0 = ('192.168.0.3', '192.168.0.4',)
        def _ip(ip):
            # All stub addresses are 'fixed' (not floating).
            return {'address': ip, 'type': 'fixed'}
        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'public',
                         'subnets': [{'cidr': '172.19.0.0/24',
                                      'ips': [_ip(ip) for ip in pub0]},
                                     {'cidr': '1.2.3.0/16',
                                      'ips': [_ip(ip) for ip in pub1]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(ip) for ip in pub2]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br1',
                         'id': 2,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip(ip) for ip in priv0]}]}}]
        return nw_cache
    def test_get_server_addresses_from_cache(self):
        """The ips index view renders addresses from the nw info cache."""
        nw_cache = self._generate_nw_cache_info()
        self.mock_get.side_effect = fakes.fake_compute_get(nw_cache=nw_cache,
            availability_zone='nova')
        req = self.req((self.path_with_id % FAKE_UUID) + '/ips')
        res_dict = self.ips_controller.index(req, FAKE_UUID)
        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3'},
                    {'version': 4, 'addr': '192.168.0.4'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1'},
                    {'version': 4, 'addr': '172.19.0.2'},
                    {'version': 4, 'addr': '1.2.3.4'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'},
                ],
            },
        }
        self.assertThat(res_dict, matchers.DictMatches(expected))
        self.mock_get.assert_called_once_with(
            req.environ['nova.context'], FAKE_UUID,
            expected_attrs=None, cell_down_support=False)
        # Make sure we kept the addresses in order
        self.assertIsInstance(res_dict['addresses'], collections.OrderedDict)
        labels = [vif['network']['label'] for vif in nw_cache]
        for index, label in enumerate(res_dict['addresses'].keys()):
            self.assertEqual(label, labels[index])
    def test_get_server_addresses_nonexistent_network(self):
        """Asking for an unknown network label yields a 404."""
        url = ((self.path_with_id_v2 % FAKE_UUID) + '/ips/network_0')
        req = self.req(url)
        self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
                          req, FAKE_UUID, 'network_0')
    def test_get_server_addresses_nonexistent_server(self):
        """InstanceNotFound from compute maps to a 404 on the ips view."""
        self.mock_get.side_effect = exception.InstanceNotFound(
            instance_id='fake')
        req = self.req((self.path_with_id % uuids.fake) + '/ips')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.ips_controller.index, req, uuids.fake)
        self.mock_get.assert_called_once_with(
            req.environ['nova.context'], uuids.fake, expected_attrs=None,
            cell_down_support=False)
    def test_show_server_hide_addresses_in_building(self):
        """Addresses are hidden while the server is still BUILDING."""
        uuid = FAKE_UUID
        req = self.req(self.path_with_id_v2 % uuid)
        self.mock_get.side_effect = fakes.fake_compute_get(
            uuid=uuid, vm_state=vm_states.BUILDING,
            project_id=req.environ['nova.context'].project_id)
        res_dict = self.controller.show(req, uuid)
        self.assertEqual({}, res_dict['server']['addresses'])
    def test_show_server_addresses_in_non_building(self):
        """An ACTIVE server's addresses include extended IP attributes."""
        uuid = FAKE_UUID
        nw_cache = self._generate_nw_cache_info()
        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                    {'version': 4, 'addr': '192.168.0.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '172.19.0.2',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '1.2.3.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                ],
            },
        }
        req = self.req(self.path_with_id_v2 % uuid)
        self.mock_get.side_effect = fakes.fake_compute_get(
            nw_cache=nw_cache, uuid=uuid, vm_state=vm_states.ACTIVE,
            project_id=req.environ['nova.context'].project_id)
        res_dict = self.controller.show(req, uuid)
        self.assertThat(res_dict['server']['addresses'],
                        matchers.DictMatches(expected['addresses']))
    def test_detail_server_hide_addresses(self):
        """detail hides addresses for BUILDING servers but not ACTIVE ones."""
        nw_cache = self._generate_nw_cache_info()
        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                    {'version': 4, 'addr': '192.168.0.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '172.19.0.2',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '1.2.3.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                ],
            },
        }
        def fake_get_all(context, **kwargs):
            # One BUILDING and one ACTIVE server, both with the same cache.
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(1,
                                                 vm_state=vm_states.BUILDING,
                                                 uuid=uuids.fake,
                                                 nw_cache=nw_cache),
                         fakes.stub_instance_obj(2,
                                                 vm_state=vm_states.ACTIVE,
                                                 uuid=uuids.fake2,
                                                 nw_cache=nw_cache)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'deleted=true',
                       use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        for server in servers:
            if server['OS-EXT-STS:vm_state'] == 'building':
                self.assertEqual({}, server['addresses'])
            else:
                self.assertThat(server['addresses'],
                                matchers.DictMatches(expected['addresses']))
    def test_get_server_list_empty(self):
        """index returns an empty list and queries with default options."""
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = objects.InstanceList(objects=[])
        req = self.req(self.path)
        res_dict = self.controller.index(req)
        self.assertEqual(0, len(res_dict['servers']))
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'], expected_attrs=[], limit=1000,
            marker=None, search_opts={'deleted': False,
                                      'project_id': self.project_id},
            sort_dirs=['desc'], sort_keys=['created_at'],
            cell_down_support=False, all_tenants=False)
    def test_get_server_list_with_reservation_id(self):
        """index with a reservation_id filter still lists the stub servers."""
        req = self.req(self.path_with_query % 'reservation_id=foo')
        res_dict = self.controller.index(req)
        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1
    def test_get_server_list_with_reservation_id_empty(self):
        """detail with a reservation_id filter lists the stub servers."""
        req = self.req(self.path_detail_with_query % 'reservation_id=foo')
        res_dict = self.controller.detail(req)
        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1
    def test_get_server_list_with_reservation_id_details(self):
        """detail with a reservation_id filter names servers sequentially."""
        req = self.req(self.path_detail_with_query % 'reservation_id=foo')
        res_dict = self.controller.detail(req)
        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1
    def test_get_server_list(self):
        """index returns all stub servers with ids, names and links set."""
        req = self.req(self.path)
        res_dict = self.controller.index(req)
        self.assertEqual(len(res_dict['servers']), 5)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            # index (non-detail) responses carry no image data.
            self.assertIsNone(s.get('image', None))
            expected_links = [
                {
                    "rel": "self",
                    "href": "http://localhost" + (
                        self.path_with_id_v2 % s['id']),
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost" + (
                        self.path_with_id % s['id']),
                },
            ]
            self.assertEqual(s['links'], expected_links)
    def test_get_servers_with_limit(self):
        """limit=3 returns a page plus a 'next' link carrying the marker."""
        req = self.req(self.path_with_query % 'limit=3')
        res_dict = self.controller.index(req)
        servers = res_dict['servers']
        self.assertEqual([s['id'] for s in servers],
                         [fakes.get_fake_uuid(i) for i in range(len(servers))])
        servers_links = res_dict['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v2' + self.path,
                         href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        # The marker is the last server of the returned page.
        expected_params = {'limit': ['3'],
                           'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected_params))
    def test_get_servers_with_limit_bad_value(self):
        """A non-integer limit fails schema validation."""
        req = self.req(self.path_with_query % 'limit=aaa')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)
    def test_get_server_details_empty(self):
        """detail on an empty list queries with the detail expected_attrs."""
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = objects.InstanceList(objects=[])
        req = self.req(self.path_detail)
        expected_attrs = ['flavor', 'info_cache', 'metadata']
        # host_status (microversion 2.16+) additionally needs services.
        if api_version_request.is_supported(req, '2.16'):
            expected_attrs.append('services')
        res_dict = self.controller.detail(req)
        self.assertEqual(0, len(res_dict['servers']))
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'],
            expected_attrs=sorted(expected_attrs),
            limit=1000, marker=None,
            search_opts={'deleted': False, 'project_id': self.project_id},
            sort_dirs=['desc'], sort_keys=['created_at'],
            cell_down_support=False, all_tenants=False)
    def test_get_server_details_with_bad_name(self):
        """An invalid regex in the name filter fails schema validation."""
        req = self.req(self.path_detail_with_query % 'name=%2Binstance')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)
    def test_get_server_details_with_limit(self):
        """detail with limit=3 pages and links to the next detail page."""
        req = self.req(self.path_detail_with_query % 'limit=3')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                         [fakes.get_fake_uuid(i) for i in range(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual(self.path_detail_v2, href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
    def test_get_server_details_with_limit_bad_value(self):
        """A non-integer limit on detail fails schema validation."""
        req = self.req(self.path_detail_with_query % 'limit=aaa')
        self.assertRaises(exception.ValidationError,
                          self.controller.detail, req)
    def test_get_server_details_with_limit_and_other_params(self):
        """Known query params survive into the next link; unknown are dropped."""
        req = self.req(self.path_detail_with_query %
                       'limit=3&blah=2:t&sort_key=uuid&sort_dir=asc')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                         [fakes.get_fake_uuid(i) for i in range(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual(self.path_detail_v2, href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        # 'blah' is not a recognized filter, so it is not propagated.
        expected = {'limit': ['3'],
                    'sort_key': ['uuid'], 'sort_dir': ['asc'],
                    'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
    def test_get_servers_with_too_big_limit(self):
        """A limit above the result count yields no pagination links."""
        req = self.req(self.path_with_query % 'limit=30')
        res_dict = self.controller.index(req)
        self.assertNotIn('servers_links', res_dict)
    def test_get_servers_with_bad_limit(self):
        """A non-numeric limit on index fails schema validation."""
        req = self.req(self.path_with_query % 'limit=asdf')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)
    def test_get_servers_with_marker(self):
        """Listing resumes after the server identified by the marker."""
        url = '%s?marker=%s' % (self.path_v2, fakes.get_fake_uuid(2))
        req = self.req(url)
        servers = self.controller.index(req)['servers']
        self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
    def test_get_servers_with_limit_and_marker(self):
        """limit and marker combine: a bounded page after the marker."""
        url = '%s?limit=2&marker=%s' % (self.path_v2,
                                        fakes.get_fake_uuid(1))
        req = self.req(url)
        servers = self.controller.index(req)['servers']
        self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
    def test_get_servers_with_bad_marker(self):
        """An unknown marker uuid results in a 400."""
        req = self.req(self.path_with_query % 'limit=2&marker=asdf')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_servers_with_invalid_filter_param(self):
        """Internal-only and dunder-style filter keys are rejected (400)."""
        req = self.req(self.path_with_query % 'info_cache=asdf',
                       use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
        req = self.req(self.path_with_query % '__foo__=asdf',
                       use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_servers_with_invalid_regex_filter_param(self):
        """A syntactically invalid regex filter value fails validation."""
        req = self.req(self.path_with_query % 'flavor=[[[',
                       use_admin_context=True)
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)
    def test_get_servers_with_empty_regex_filter_param(self):
        """An empty filter value on index fails validation."""
        req = self.req(self.path_with_query % 'flavor=',
                       use_admin_context=True)
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)
    def test_get_servers_detail_with_empty_regex_filter_param(self):
        """An empty filter value on detail fails validation."""
        req = self.req(self.path_detail_with_query % 'flavor=',
                       use_admin_context=True)
        self.assertRaises(exception.ValidationError,
                          self.controller.detail, req)
    def test_get_servers_invalid_sort_key(self):
        # "hidden" is a real field for instances but not exposed in the API.
        req = self.req(self.path_with_query %
                       'sort_key=hidden&sort_dir=desc')
        self.assertRaises(exception.ValidationError,
                          self.controller.index, req)
    def test_get_servers_ignore_sort_key(self):
        """Non-sortable keys like vcpus are silently dropped from sorting."""
        req = self.req(self.path_with_query %
                       'sort_key=vcpus&sort_dir=asc')
        self.controller.index(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_ignore_locked_sort_key(self):
        # Prior to microversion 2.73 locked sort key is ignored.
        req = self.req(self.path_with_query %
                       'sort_key=locked&sort_dir=asc')
        self.controller.detail(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_ignore_sort_key_only_one_dir(self):
        """Dropping an ignored sort key also drops its paired direction."""
        req = self.req(self.path_with_query %
                       'sort_key=user_id&sort_key=vcpus&sort_dir=asc')
        self.controller.index(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=['user_id'],
            sort_dirs=['asc'], cell_down_support=False, all_tenants=False)
    def test_get_servers_ignore_sort_key_with_no_sort_dir(self):
        """Ignored keys are filtered even when no sort_dir is supplied."""
        req = self.req(self.path_with_query %
                       'sort_key=vcpus&sort_key=user_id')
        self.controller.index(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=['user_id'], sort_dirs=[],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_ignore_sort_key_with_bad_sort_dir(self):
        """An ignored sort key drops its direction even if it is bogus."""
        req = self.req(self.path_with_query %
                       'sort_key=vcpus&sort_dir=bad_dir')
        self.controller.index(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=[], sort_dirs=[],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_non_admin_with_admin_only_sort_key(self):
        """Non-admins may not sort by admin-only keys such as host (403)."""
        req = self.req(self.path_with_query %
                       'sort_key=host&sort_dir=desc')
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.index, req)
    def test_get_servers_admin_with_admin_only_sort_key(self):
        """Admins may sort by admin-only keys such as node."""
        req = self.req(self.path_with_query %
                       'sort_key=node&sort_dir=desc',
                       use_admin_context=True)
        self.controller.detail(req)
        self.mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=mock.ANY, limit=mock.ANY, marker=mock.ANY,
            expected_attrs=mock.ANY, sort_keys=['node'], sort_dirs=['desc'],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_with_bad_option(self):
        """Unknown query options are stripped before hitting compute."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'unknownoption=whee')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # 'unknownoption' must not appear in the search_opts passed down.
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'], expected_attrs=[],
            limit=1000, marker=None,
            search_opts={'deleted': False, 'project_id': self.project_id},
            sort_dirs=['desc'], sort_keys=['created_at'],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_with_locked_filter(self):
        # Prior to microversion 2.73 locked filter parameter is ignored.
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'locked=true')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # 'locked' must not appear in the search_opts passed down.
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'], expected_attrs=[],
            limit=1000, marker=None,
            search_opts={'deleted': False, 'project_id': self.project_id},
            sort_dirs=['desc'], sort_keys=['created_at'],
            cell_down_support=False, all_tenants=False)
def test_get_servers_allows_image(self):
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIsNotNone(search_opts)
self.assertIn('image', search_opts)
self.assertEqual(search_opts['image'], '12345')
db_list = [fakes.stub_instance(100, uuid=uuids.fake)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'image=12345')
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
self.assertEqual(uuids.fake, servers[0]['id'])
def test_tenant_id_filter_no_admin_context(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertNotIn('tenant_id', search_opts)
self.assertEqual(self.project_id, search_opts['project_id'])
return [fakes.stub_instance_obj(100)]
req = self.req(self.path_with_query % 'tenant_id=newfake')
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
    def test_tenant_id_filter_admin_context(self):
        """Test tenant_id search opt is dropped if all_tenants is not set."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('tenant_id', search_opts)
            self.assertEqual(self.project_id, search_opts['project_id'])
            return [fakes.stub_instance_obj(100)]
        req = self.req(self.path_with_query % 'tenant_id=newfake',
                       use_admin_context=True)
        self.mock_get_all.side_effect = fake_get_all
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
def test_all_tenants_param_normal(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('project_id', search_opts)
return [fakes.stub_instance_obj(100)]
req = self.req(self.path_with_query % 'all_tenants',
use_admin_context=True)
self.mock_get_all.side_effect = fake_get_all
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_all_tenants_param_one(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('project_id', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=1',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_param_zero(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=0',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_param_false(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=false',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
def test_all_tenants_param_invalid(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'all_tenants=xxx',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_admin_restricted_tenant(self):
def fake_get_all(context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertEqual(search_opts['project_id'], self.project_id)
return [fakes.stub_instance_obj(100)]
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path, use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(1, len(servers))
    def test_all_tenants_pass_policy(self):
        """all_tenants=1 is honoured when policy grants get_all_tenants."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('project_id', search_opts)
            self.assertTrue(context.is_admin)
            return [fakes.stub_instance_obj(100)]
        self.mock_get_all.side_effect = fake_get_all
        # Grant this project the index and get_all_tenants policy rules.
        rules = {
            "os_compute_api:servers:index": "project_id:%s" % self.project_id,
            "os_compute_api:servers:index:get_all_tenants":
                "project_id:%s" % self.project_id
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        req = self.req(self.path_with_query % 'all_tenants=1')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
    def test_get_servers_allows_flavor(self):
        """The 'flavor' query parameter is passed through as a search opt."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            # flavor is an integer ID
            self.assertEqual(search_opts['flavor'], '12345')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'flavor=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_with_bad_flavor(self):
req = self.req(self.path_with_query % 'flavor=abcde')
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_server_details_with_bad_flavor(self):
req = self.req(self.path_with_query % 'flavor=abcde')
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = objects.InstanceList(objects=[])
servers = self.controller.detail(req)['servers']
self.assertThat(servers, testtools.matchers.HasLength(0))
    def test_get_servers_allows_status(self):
        """'status=active' is mapped to the ACTIVE vm_state filter."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'status=active')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_task_status(self):
        """'status=reboot' expands to the reboot-related task states."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('task_state', search_opts)
            self.assertEqual([task_states.REBOOT_PENDING,
                              task_states.REBOOT_STARTED,
                              task_states.REBOOTING],
                             search_opts['task_state'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(
                    100, uuid=uuids.fake, task_state=task_states.REBOOTING)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'status=reboot')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_resize_status(self):
# Test when resize status, it maps list of vm states.
def fake_get_all(context, search_opts=None,
limit=None, marker=None,
expected_attrs=None, sort_keys=None, sort_dirs=None,
cell_down_support=False, all_tenants=False):
self.assertIn('vm_state', search_opts)
self.assertEqual(search_opts['vm_state'],
[vm_states.ACTIVE, vm_states.STOPPED])
return objects.InstanceList(
objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
self.mock_get_all.side_effect = fake_get_all
req = self.req(self.path_with_query % 'status=resize')
servers = self.controller.detail(req)['servers']
self.assertEqual(1, len(servers), 1)
self.assertEqual(servers[0]['id'], uuids.fake)
def test_get_servers_invalid_status(self):
# Test getting servers by invalid status.
req = self.req(self.path_with_query % 'status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = self.req(self.path_with_query % 'status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.detail, req)
    def test_get_servers_deleted_status_as_admin(self):
        """Admins may filter by status=deleted; it maps to vm_state."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'status=deleted',
                       use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_deleted_filter_str_to_bool(self):
        """The string 'deleted=true' is coerced to a boolean search opt."""
        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=uuids.fake,
                                             vm_state='deleted')])
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = db_list
        req = self.req(self.path_with_query % 'deleted=true',
                       use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # Assert that 'deleted' filter value is converted to boolean
        # while calling get_all() method.
        expected_search_opts = {'deleted': True, 'project_id': self.project_id}
        self.assertEqual(expected_search_opts,
                         self.mock_get_all.call_args[1]['search_opts'])
    def test_get_servers_deleted_filter_invalid_str(self):
        """An unparseable 'deleted' value falls back to boolean False."""
        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = db_list
        # NOTE(review): built via fakes.HTTPRequest.blank rather than
        # self.req like the sibling tests — presumably equivalent; confirm.
        req = fakes.HTTPRequest.blank(self.path_with_query % 'deleted=abc',
                                      use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        # Assert that invalid 'deleted' filter value is converted to boolean
        # False while calling get_all() method.
        expected_search_opts = {'deleted': False,
                                'project_id': self.project_id}
        self.assertEqual(expected_search_opts,
                         self.mock_get_all.call_args[1]['search_opts'])
    def test_get_servers_allows_name(self):
        """The 'name' filter passes its regex through unchanged."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            # index() requests no extra instance attributes.
            self.assertEqual([], expected_attrs)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'name=whee.*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_flavor_not_found(self):
self.mock_get_all.side_effect = exception.FlavorNotFound(flavor_id=1)
req = fakes.HTTPRequest.blank(
self.path_with_query % 'status=active&flavor=abc')
servers = self.controller.index(req)['servers']
self.assertEqual(0, len(servers))
    def test_get_servers_allows_changes_since(self):
        """A valid changes-since timestamp is parsed into a tz-aware datetime."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            # The default deleted=False filter must not be added here.
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        params = 'changes-since=2011-01-24T17:08:01Z'
        req = self.req(self.path_with_query % params)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = self.req(self.path_with_query % params)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_get_servers_allows_changes_since_bad_value_on_compat_mode(self):
params = 'changes-since=asdf'
req = self.req(self.path_with_query % params)
req.set_legacy_v2()
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index,
req)
    def test_get_servers_admin_filters_as_user(self):
        """Test getting servers by admin-only or unknown options when
        context is not admin. Make sure the admin and unknown options
        are stripped before they get to compute_api.get_all()
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('ip', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertNotIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        # NOTE(review): built with fakes.HTTPRequest.blank rather than
        # self.req — presumably to get a plain non-admin request; confirm.
        req = fakes.HTTPRequest.blank(self.path_with_query % query_str)
        res = self.controller.index(req)
        servers = res['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_options_as_admin(self):
        """Test getting servers by admin-only or unknown options when
        context is admin. All options should be passed
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('terminated_at', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            # Truly unknown options are stripped even for admins.
            self.assertNotIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        query_str = ("name=foo&ip=10.*&status=active&unknown_option=meow&"
                     "terminated_at=^2016-02-01.*")
        req = self.req(self.path_with_query % query_str,
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_filters_as_user_with_policy_override(self):
        """Test getting servers by admin-only or unknown options when
        context is not admin but policy allows.
        """
        server_uuid = uuids.fake
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('terminated_at', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            self.assertNotIn('unknown_option', search_opts)
            # "hidden" is ignored as a filter parameter since it is only used
            # internally
            self.assertNotIn('hidden', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        # Grant this non-admin project permission to use all filters.
        rules = {
            "os_compute_api:servers:index": "project_id:%s" % self.project_id,
            "os_compute_api:servers:allow_all_filters":
                "project_id:%s" % self.project_id,
        }
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        self.mock_get_all.side_effect = fake_get_all
        query_str = ("name=foo&ip=10.*&status=active&unknown_option=meow&"
                     "terminated_at=^2016-02-01.*&hidden=true")
        req = self.req(self.path_with_query % query_str)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_ip(self):
        """Test getting servers by ip."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip', search_opts)
            # The regex value is passed through verbatim.
            self.assertEqual(search_opts['ip'], r'10\..*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % r'ip=10\..*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_allows_ip6(self):
        """Test getting servers by ip6 with admin_api enabled and
        admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            # The filter value is passed through verbatim.
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'ip6=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_ip6_with_new_version(self):
        """Test getting servers by ip6 with new version requested
        and no admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'ip6=ffff.*')
        # Force microversion 2.5, which opens the ip6 filter to non-admins.
        req.api_version_request = api_version_request.APIVersionRequest('2.5')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_allows_access_ip_v4(self):
        """Test getting servers by access_ip_v4 with admin_api enabled and
        admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('access_ip_v4', search_opts)
            # The filter value is passed through verbatim.
            self.assertEqual(search_opts['access_ip_v4'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'access_ip_v4=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_admin_allows_access_ip_v6(self):
        """Test getting servers by access_ip_v6 with admin_api enabled and
        admin context
        """
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('access_ip_v6', search_opts)
            # The filter value is passed through verbatim.
            self.assertEqual(search_opts['access_ip_v6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_with_query % 'access_ip_v6=ffff.*',
                       use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def _assertServerUsage(self, server, launched_at, terminated_at):
        """Assert the OS-SRV-USG timestamps on *server* match expectations."""
        resp_launched_at = timeutils.parse_isotime(
            server.get('OS-SRV-USG:launched_at'))
        self.assertEqual(timeutils.normalize_time(resp_launched_at),
                         launched_at)
        resp_terminated_at = timeutils.parse_isotime(
            server.get('OS-SRV-USG:terminated_at'))
        self.assertEqual(timeutils.normalize_time(resp_terminated_at),
                         terminated_at)
    def test_show_server_usage(self):
        """GET /servers/{id} includes the OS-SRV-USG usage timestamps."""
        DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
        DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
        req = self.req(self.path_with_id % FAKE_UUID)
        req.accept = 'application/json'
        req.method = 'GET'
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=1, uuid=FAKE_UUID, launched_at=DATE1, terminated_at=DATE2,
            project_id=req.environ['nova.context'].project_id)
        res = req.get_response(compute.APIRouterV21())
        self.assertEqual(res.status_int, 200)
        # NOTE(review): the TimeFixture is installed after the response is
        # generated — presumably only the assertions below need frozen time;
        # confirm this ordering is intentional.
        self.useFixture(utils_fixture.TimeFixture())
        self._assertServerUsage(jsonutils.loads(res.body).get('server'),
                                launched_at=DATE1,
                                terminated_at=DATE2)
    def test_detail_server_usage(self):
        """GET /servers/detail includes OS-SRV-USG fields for each server."""
        DATE1 = datetime.datetime(year=2013, month=4, day=5, hour=12)
        DATE2 = datetime.datetime(year=2013, month=4, day=5, hour=13)
        DATE3 = datetime.datetime(year=2013, month=4, day=5, hour=14)
        def fake_compute_get_all(*args, **kwargs):
            # NOTE(review): 'context' here resolves to the module-level
            # import, not a request context — presumably stub_instance_obj
            # only needs it nominally; confirm.
            db_list = [
                fakes.stub_instance_obj(context, id=2, uuid=FAKE_UUID,
                                        launched_at=DATE2,
                                        terminated_at=DATE3),
                fakes.stub_instance_obj(context, id=3, uuid=FAKE_UUID,
                                        launched_at=DATE1,
                                        terminated_at=DATE3),
            ]
            return objects.InstanceList(objects=db_list)
        self.mock_get_all.side_effect = fake_compute_get_all
        req = self.req(self.path_detail)
        req.accept = 'application/json'
        servers = req.get_response(compute.APIRouterV21())
        self.assertEqual(servers.status_int, 200)
        self._assertServerUsage(jsonutils.loads(
                                    servers.body).get('servers')[0],
                                launched_at=DATE2,
                                terminated_at=DATE3)
        self._assertServerUsage(jsonutils.loads(
                                    servers.body).get('servers')[1],
                                launched_at=DATE1,
                                terminated_at=DATE3)
    def test_get_all_server_details(self):
        """detail() fills in image/flavor bookmark links and basic fields."""
        expected_flavor = {
            "id": "2",
            "links": [
                {
                    "rel": "bookmark",
                    "href": ('http://localhost/%s/flavors/2' %
                             self.project_id),
                },
            ],
        }
        expected_image = {
            "id": FAKE_UUID,
            "links": [
                {
                    "rel": "bookmark",
                    "href": ('http://localhost/%s/images/%s' % (
                        self.project_id, FAKE_UUID)),
                },
            ],
        }
        req = self.req(self.path_detail)
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'ACTIVE')
            self.assertEqual(s['metadata']['seq'], str(i + 1))
    def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same host,
        then they return the same hostId. If two instances are on different
        hosts, they should return different hostIds. In this test,
        there are 5 instances - 2 on one host and 3 on another.
        """
        def return_servers_with_host(*args, **kwargs):
            # host=i % 2 alternates the five instances across two hosts.
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(None,
                                                 id=i + 1,
                                                 user_id='fake',
                                                 project_id='fake',
                                                 host=i % 2,
                                                 uuid=fakes.get_fake_uuid(i))
                         for i in range(5)])
        self.mock_get_all.side_effect = return_servers_with_host
        req = self.req(self.path_detail)
        res_dict = self.controller.detail(req)
        server_list = res_dict['servers']
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        # Both hostIds are non-empty and differ from each other.
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])
        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))
    def test_get_servers_joins_services(self):
        """From 2.16 on, detail() requests the 'services' expected attr."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            cur = api_version_request.APIVersionRequest(self.wsgi_api_version)
            v216 = api_version_request.APIVersionRequest('2.16')
            if cur >= v216:
                self.assertIn('services', expected_attrs)
            else:
                self.assertNotIn('services', expected_attrs)
            return objects.InstanceList()
        self.mock_get_all.side_effect = fake_get_all
        req = self.req(self.path_detail, use_admin_context=True)
        self.assertIn('servers', self.controller.detail(req))
        # Repeat with an explicitly versioned request.
        req = fakes.HTTPRequest.blank(self.path_detail,
                                      use_admin_context=True,
                                      version=self.wsgi_api_version)
        self.assertIn('servers', self.controller.detail(req))
class ServersControllerTestV23(ServersControllerTest):
    """Tests for microversion 2.3, which adds the OS-EXT-* attributes."""

    wsgi_api_version = '2.3'
    def setUp(self):
        super(ServersControllerTestV23, self).setUp()
        self.request = self.req(self.path_with_id % FAKE_UUID)
        self.project_id = self.request.environ['nova.context'].project_id
        # Stub every instance GET with a fully-populated fake server.
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1,
            project_id=self.project_id)
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        # Extend the base expected-server dict with the 2.3 extra attrs.
        server_dict = super(ServersControllerTestV23,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        server_dict['server']["tenant_id"] = self.project_id
        return server_dict
    def test_show(self):
        """show() returns the full 2.3 server body."""
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        res_dict = self.controller.show(self.request, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_detail(self):
        """detail() returns the full 2.3 server body for listed servers."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            obj_list = []
            for i in range(2):
                server = fakes.stub_instance_obj(context,
                              id=2, uuid=FAKE_UUID,
                              node="node-fake",
                              reservation_id="r-1", launch_index=0,
                              kernel_id=UUID1, ramdisk_id=UUID2,
                              display_name="server2",
                              root_device_name="/dev/vda",
                              user_data="userdata",
                              metadata={"seq": "2"},
                              availability_zone='nova',
                              launched_at=None,
                              terminated_at=None,
                              task_state=None,
                              vm_state=vm_states.ACTIVE,
                              power_state=1,
                              project_id=context.project_id)
                obj_list.append(server)
            return objects.InstanceList(objects=obj_list)
        self.mock_get_all.side_effect = None
        req = self.req(self.path_detail)
        self.mock_get_all.return_value = fake_get_all(
            req.environ['nova.context'])
        servers_list = self.controller.detail(req)
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertIn(expected_server['server'], servers_list['servers'])
class ServersControllerTestV29(ServersControllerTest):
wsgi_api_version = '2.9'
def setUp(self):
super(ServersControllerTestV29, self).setUp()
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1,
project_id=self.request.environ['nova.context'].project_id)
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
status="ACTIVE", progress=100):
server_dict = super(ServersControllerTestV29,
self)._get_server_data_dict(uuid,
image_bookmark,
flavor_bookmark,
status,
progress)
server_dict['server']['locked'] = False
server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
server_dict['server'][
"OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
server_dict['server']["OS-EXT-STS:task_state"] = None
server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
server_dict['server']["OS-EXT-STS:power_state"] = 1
server_dict['server']["os-extended-volumes:volumes_attached"] = [
{'id': 'some_volume_1', 'delete_on_termination': True},
{'id': 'some_volume_2', 'delete_on_termination': False}]
server_dict['server']["tenant_id"] = self.request.environ[
'nova.context'].project_id
return server_dict
def _test_get_server_with_lock(self, locked_by):
image_bookmark = "http://localhost/%s/images/%s" % (
self.project_id, FAKE_UUID)
flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
req = self.req(self.path_with_id % FAKE_UUID)
project_id = req.environ['nova.context'].project_id
self.mock_get.side_effect = fakes.fake_compute_get(
id=2, locked_by=locked_by, uuid=FAKE_UUID,
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1,
project_id=project_id)
res_dict = self.controller.show(req, FAKE_UUID)
expected_server = self._get_server_data_dict(FAKE_UUID,
image_bookmark,
flavor_bookmark,
progress=0)
expected_server['server']['locked'] = True if locked_by else False
expected_server['server']['tenant_id'] = project_id
self.assertThat(res_dict, matchers.DictMatches(expected_server))
return res_dict
def test_get_server_with_locked_by_admin(self):
res_dict = self._test_get_server_with_lock('admin')
self.assertTrue(res_dict['server']['locked'])
def test_get_server_with_locked_by_owner(self):
res_dict = self._test_get_server_with_lock('owner')
self.assertTrue(res_dict['server']['locked'])
def test_get_server_not_locked(self):
res_dict = self._test_get_server_with_lock(None)
self.assertFalse(res_dict['server']['locked'])
def _test_list_server_detail_with_lock(self,
s1_locked,
s2_locked):
self.mock_get_all.side_effect = None
self.mock_get_all.return_value = fake_instance_get_all_with_locked(
context, [s1_locked, s2_locked],
node="node-fake",
reservation_id="r-1", launch_index=0,
kernel_id=UUID1, ramdisk_id=UUID2,
display_name="server2",
root_device_name="/dev/vda",
user_data="userdata",
metadata={"seq": "2"},
availability_zone='nova',
launched_at=None,
terminated_at=None,
task_state=None,
vm_state=vm_states.ACTIVE,
power_state=1)
req = self.req(self.path_detail)
servers_list = self.controller.detail(req)
# Check that each returned server has the same 'locked' value
# and 'id' as they were created.
for locked in [s1_locked, s2_locked]:
server = next(server for server in servers_list['servers']
if (server['id'] == fakes.get_fake_uuid(locked)))
expected = False if locked == 'not_locked' else True
self.assertEqual(expected, server['locked'])
    def test_list_server_detail_with_locked_s1_admin_s2_owner(self):
        # Server 1 locked by an admin, server 2 locked by its owner.
        self._test_list_server_detail_with_lock('admin', 'owner')
    def test_list_server_detail_with_locked_s1_owner_s2_admin(self):
        # Server 1 locked by its owner, server 2 locked by an admin.
        self._test_list_server_detail_with_lock('owner', 'admin')
    def test_list_server_detail_with_locked_s1_admin_s2_admin(self):
        # Both servers locked by an admin.
        self._test_list_server_detail_with_lock('admin', 'admin')
    def test_list_server_detail_with_locked_s1_admin_s2_not_locked(self):
        # Server 1 locked by an admin, server 2 unlocked.
        self._test_list_server_detail_with_lock('admin', 'not_locked')
    def test_list_server_detail_with_locked_s1_s2_not_locked(self):
        # Neither server locked.
        self._test_list_server_detail_with_lock('not_locked',
                                                'not_locked')
def test_get_servers_remove_non_search_options(self):
self.mock_get_all.side_effect = None
req = fakes.HTTPRequestV21.blank('/servers'
'?sort_key=uuid&sort_dir=asc'
'&sort_key=user_id&sort_dir=desc'
'&limit=1&marker=123',
use_admin_context=True)
self.controller.index(req)
kwargs = self.mock_get_all.call_args[1]
search_opts = kwargs['search_opts']
for key in ('sort_key', 'sort_dir', 'limit', 'marker'):
self.assertNotIn(key, search_opts)
class ServersControllerTestV216(ServersControllerTest):
    """Tests at microversion 2.16, where the show/detail responses carry a
    'host_status' field (stubbed to 'UP' here) plus the OS-EXT-* attributes.
    """
    wsgi_api_version = '2.16'
    def setUp(self):
        # Stub the compute API 'get' so show() returns a fully populated
        # fake instance owned by the request's project.
        super(ServersControllerTestV216, self).setUp()
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            host="node-fake",
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1,
            project_id=self.request.environ['nova.context'].project_id)
        # Always report the host as 'UP' so host_status is predictable;
        # the mock is kept so tests can assert how often it was called.
        self.mock_get_instance_host_status = self.useFixture(
            fixtures.MockPatchObject(
                compute_api.API, 'get_instance_host_status',
                return_value='UP')).mock
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        # Extend the base expected-response dict with the fields the 2.16
        # view builder adds (host_status and the OS-EXT-* attributes),
        # mirroring the values stubbed in setUp().
        server_dict = super(ServersControllerTestV216,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        server_dict['server']["host_status"] = "UP"
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        # hostId is derived from host + tenant, same as the view builder.
        server_dict['server']['hostId'] = nova_utils.generate_hostid(
            'node-fake', server_dict['server']['tenant_id'])
        server_dict['server']["OS-EXT-SRV-ATTR:host"] = "node-fake"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        server_dict['server']['tenant_id'] = self.request.environ[
            'nova.context'].project_id
        return server_dict
    @mock.patch('nova.compute.api.API.get_instance_host_status')
    def _verify_host_status_policy_behavior(self, func, mock_get_host_status):
        # Set policy to disallow both host_status cases and verify we don't
        # call the get_instance_host_status compute RPC API.
        rules = {
            'os_compute_api:servers:show:host_status': '!',
            'os_compute_api:servers:show:host_status:unknown-only': '!',
        }
        orig_rules = policy.get_rules()
        policy.set_rules(oslo_policy.Rules.from_dict(rules), overwrite=False)
        func()
        mock_get_host_status.assert_not_called()
        # Restore the original rules.
        policy.set_rules(orig_rules)
    def test_show(self):
        """show() returns the full 2.16 body; host_status is policy-gated."""
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        res_dict = self.controller.show(self.request, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        func = functools.partial(self.controller.show, self.request,
                                 FAKE_UUID)
        self._verify_host_status_policy_behavior(func)
    def test_detail(self):
        """detail() returns the 2.16 body and fetches host status once."""
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None):
            # Two identical servers on the same host, so host_status should
            # only need to be looked up once.
            obj_list = []
            for i in range(2):
                server = fakes.stub_instance_obj(context,
                    id=2, uuid=FAKE_UUID,
                    host="node-fake",
                    node="node-fake",
                    reservation_id="r-1", launch_index=0,
                    kernel_id=UUID1, ramdisk_id=UUID2,
                    display_name="server2",
                    root_device_name="/dev/vda",
                    user_data="userdata",
                    metadata={"seq": "2"},
                    availability_zone='nova',
                    launched_at=None,
                    terminated_at=None,
                    task_state=None,
                    vm_state=vm_states.ACTIVE,
                    power_state=1,
                    project_id=context.project_id)
                obj_list.append(server)
            return objects.InstanceList(objects=obj_list)
        self.mock_get_all.side_effect = None
        req = self.req(self.path_detail)
        self.mock_get_all.return_value = fake_get_all(
            req.environ['nova.context'])
        servers_list = self.controller.detail(req)
        self.assertEqual(2, len(servers_list['servers']))
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0)
        self.assertIn(expected_server['server'], servers_list['servers'])
        # We should have only gotten the host status once per host (and the
        # 2 servers in the response are using the same host).
        self.mock_get_instance_host_status.assert_called_once()
        func = functools.partial(self.controller.detail, req)
        self._verify_host_status_policy_behavior(func)
class ServersControllerTestV219(ServersControllerTest):
    """Tests at microversion 2.19, where the show/detail responses carry a
    'description' field alongside the 2.16-era extended attributes.
    """
    wsgi_api_version = '2.19'
    def setUp(self):
        # Stub the compute API 'get' so show() returns a fully populated
        # fake instance owned by the request's project.
        super(ServersControllerTestV219, self).setUp()
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1,
            project_id=self.request.environ['nova.context'].project_id)
        # Pin host status to 'UP' so the expected dict below is stable.
        self.useFixture(fixtures.MockPatchObject(
            compute_api.API, 'get_instance_host_status',
            return_value='UP')).mock
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100, description=None):
        # Extend the base expected-response dict with the 2.19 'description'
        # field plus the extended attributes stubbed in setUp().
        server_dict = super(ServersControllerTestV219,
                            self)._get_server_data_dict(uuid,
                                                        image_bookmark,
                                                        flavor_bookmark,
                                                        status,
                                                        progress)
        server_dict['server']['locked'] = False
        server_dict['server']['description'] = description
        server_dict['server']["host_status"] = "UP"
        server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
        server_dict['server'][
            "OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
        server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
        server_dict['server']["OS-EXT-SRV-ATTR:launch_index"] = 0
        server_dict['server']["OS-EXT-SRV-ATTR:ramdisk_id"] = UUID2
        server_dict['server']["OS-EXT-SRV-ATTR:reservation_id"] = "r-1"
        server_dict['server']["OS-EXT-SRV-ATTR:root_device_name"] = "/dev/vda"
        server_dict['server']["OS-EXT-SRV-ATTR:user_data"] = "userdata"
        server_dict['server']["OS-EXT-STS:task_state"] = None
        server_dict['server']["OS-EXT-STS:vm_state"] = vm_states.ACTIVE
        server_dict['server']["OS-EXT-STS:power_state"] = 1
        server_dict['server']["os-extended-volumes:volumes_attached"] = [
            {'id': 'some_volume_1', 'delete_on_termination': True},
            {'id': 'some_volume_2', 'delete_on_termination': False}]
        return server_dict
    def _test_get_server_with_description(self, description):
        # show() must echo back the display_description the instance was
        # created with.
        image_bookmark = "http://localhost/%s/images/%s" % (
            self.project_id, FAKE_UUID)
        flavor_bookmark = "http://localhost/%s/flavors/2" % self.project_id
        req = self.req(self.path_with_id % FAKE_UUID)
        project_id = req.environ['nova.context'].project_id
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, display_description=description, uuid=FAKE_UUID,
            node="node-fake",
            reservation_id="r-1", launch_index=0,
            kernel_id=UUID1, ramdisk_id=UUID2,
            display_name="server2",
            root_device_name="/dev/vda",
            user_data="userdata",
            metadata={"seq": "2"},
            availability_zone='nova',
            launched_at=None,
            terminated_at=None,
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1,
            project_id=project_id)
        res_dict = self.controller.show(req, FAKE_UUID)
        expected_server = self._get_server_data_dict(FAKE_UUID,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     progress=0,
                                                     description=description)
        expected_server['server']['tenant_id'] = project_id
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
        return res_dict
    def _test_list_server_detail_with_descriptions(self,
                                                   s1_desc,
                                                   s2_desc):
        self.mock_get_all.side_effect = None
        self.mock_get_all.return_value = (
            fake_instance_get_all_with_description(context,
                                                   [s1_desc, s2_desc],
                                                   launched_at=None,
                                                   terminated_at=None))
        req = self.req(self.path_detail)
        servers_list = self.controller.detail(req)
        # Check that each returned server has the same 'description' value
        # and 'id' as they were created.
        for desc in [s1_desc, s2_desc]:
            server = next(server for server in servers_list['servers']
                          if (server['id'] == fakes.get_fake_uuid(desc)))
            expected = desc
            self.assertEqual(expected, server['description'])
    def test_get_server_with_description(self):
        self._test_get_server_with_description('test desc')
    def test_list_server_detail_with_descriptions(self):
        self._test_list_server_detail_with_descriptions('desc1', 'desc2')
class ServersControllerTestV226(ControllerTest):
    """Tests at microversion 2.26, which exposes server tags."""
    wsgi_api_version = '2.26'
    def test_get_server_with_tags_by_id(self):
        """show() requests 'tags' in expected_attrs and echoes them back."""
        req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID,
                                      version=self.wsgi_api_version)
        ctxt = req.environ['nova.context']
        tags = ['tag1', 'tag2']
        def fake_get(*args, **kwargs):
            # The controller must ask for tags to be loaded with the server.
            self.assertIn('tags', kwargs['expected_attrs'])
            server = fakes.stub_instance_obj(
                ctxt, id=2, vm_state=vm_states.ACTIVE, progress=100,
                project_id=ctxt.project_id)
            tag_objects = []
            for tag in tags:
                tag_objects.append(
                    objects.Tag(resource_id=FAKE_UUID, tag=tag))
            server.tags = objects.TagList(objects=tag_objects)
            return server
        self.mock_get.side_effect = fake_get
        res_dict = self.controller.show(req, FAKE_UUID)
        self.assertIn('tags', res_dict['server'])
        self.assertEqual(tags, res_dict['server']['tags'])
    def _test_get_servers_allows_tag_filters(self, filter_name):
        """index() must pass the given tag filter through to get_all."""
        req = fakes.HTTPRequest.blank(
            self.path_with_query % ('%s=t1,t2' % filter_name),
            version=self.wsgi_api_version)
        def fake_get_all(*args, **kwargs):
            opts = kwargs['search_opts']
            self.assertIsNotNone(opts)
            self.assertIn(filter_name, opts)
            # Comma-separated tag values arrive as a list.
            self.assertEqual(opts[filter_name], ['t1', 't2'])
            stub = fakes.stub_instance_obj(req.environ['nova.context'],
                                           uuid=uuids.fake)
            return objects.InstanceList(objects=[stub])
        self.mock_get_all.side_effect = fake_get_all
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_tags_filter(self):
        self._test_get_servers_allows_tag_filters('tags')
    def test_get_servers_allows_tags_any_filter(self):
        self._test_get_servers_allows_tag_filters('tags-any')
    def test_get_servers_allows_not_tags_filter(self):
        self._test_get_servers_allows_tag_filters('not-tags')
    def test_get_servers_allows_not_tags_any_filter(self):
        self._test_get_servers_allows_tag_filters('not-tags-any')
class ServerControllerTestV238(ControllerTest):
    """At microversion 2.38 an unknown status filter is a 400, for both
    admin and non-admin callers.
    """
    wsgi_api_version = '2.38'
    def _test_invalid_status(self, is_admin):
        url = self.path_detail_with_query % 'status=invalid'
        req = fakes.HTTPRequest.blank(url,
                                      version=self.wsgi_api_version,
                                      use_admin_context=is_admin)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.detail, req)
    def test_list_servers_detail_invalid_status_for_admin(self):
        self._test_invalid_status(True)
    def test_list_servers_detail_invalid_status_for_non_admin(self):
        self._test_invalid_status(False)
class ServerControllerTestV247(ControllerTest):
    """Server controller test for microversion 2.47
    The intent here is simply to verify that when showing server details
    after microversion 2.47 that the flavor is shown as a dict of flavor
    information rather than as dict of id/links. The existence of the
    'extra_specs' key is controlled by policy.
    """
    wsgi_api_version = '2.47'
    @mock.patch.object(objects.TagList, 'get_by_resource_id')
    def test_get_all_server_details(self, mock_get_by_resource_id):
        # Fake out tags on the instances
        mock_get_by_resource_id.return_value = objects.TagList()
        # Embedded flavor dict expected in each server of the response.
        expected_flavor = {
            'disk': 20,
            'ephemeral': 0,
            'extra_specs': {},
            'original_name': u'm1.small',
            'ram': 2048,
            'swap': 0,
            'vcpus': 1}
        req = fakes.HTTPRequest.blank(self.path_detail,
                                      version=self.wsgi_api_version)
        hits = []
        real_auth = policy.authorize
        # Wrapper for authorize to count the number of times
        # we authorize for extra-specs
        def fake_auth(context, action, target):
            if 'extra-specs' in action:
                hits.append(1)
            return real_auth(context, action, target)
        with mock.patch('nova.policy.authorize') as mock_auth:
            mock_auth.side_effect = fake_auth
            res_dict = self.controller.detail(req)
        # We should have found more than one servers, but only hit the
        # policy check once
        self.assertGreater(len(res_dict['servers']), 1)
        self.assertEqual(1, len(hits))
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['flavor'], expected_flavor)
    @mock.patch.object(objects.TagList, 'get_by_resource_id')
    def test_get_all_server_details_no_extra_spec(self,
                                                  mock_get_by_resource_id):
        # Fake out tags on the instances
        mock_get_by_resource_id.return_value = objects.TagList()
        # Set the policy so we don't have permission to index
        # flavor extra-specs but are able to get server details.
        servers_rule = 'os_compute_api:servers:detail'
        extraspec_rule = 'os_compute_api:os-flavor-extra-specs:index'
        self.policy.set_rules({
            extraspec_rule: 'rule:admin_api',
            servers_rule: '@'})
        # Same flavor dict as above but without the 'extra_specs' key,
        # since policy now denies it.
        expected_flavor = {
            'disk': 20,
            'ephemeral': 0,
            'original_name': u'm1.small',
            'ram': 2048,
            'swap': 0,
            'vcpus': 1}
        req = fakes.HTTPRequest.blank(self.path_detail,
                                      version=self.wsgi_api_version)
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['flavor'], expected_flavor)
class ServerControllerTestV266(ControllerTest):
    """Server controller test for microversion 2.66
    Add changes-before parameter to get servers or servers details of
    2.66 microversion.
    Filters the response by a date and time stamp when the server last
    changed. Those changed before the specified date and time stamp are
    returned.
    """
    wsgi_api_version = '2.66'
    def req(self, url, use_admin_context=False):
        # Build a request pinned to this class's microversion.
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)
    def test_get_servers_allows_changes_before(self):
        # changes-before must reach get_all as a tz-aware datetime and must
        # not implicitly add a 'deleted' filter.
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-before', search_opts)
            changes_before = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                               tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-before'], changes_before)
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        params = 'changes-before=2011-01-24T17:08:01Z'
        req = self.req(self.path_with_query % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_allows_changes_before_bad_value(self):
        # A malformed timestamp is rejected by schema validation.
        params = 'changes-before=asdf'
        req = self.req(self.path_with_query % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        self.assertRaises(exception.ValidationError, self.controller.index,
                          req)
    def test_get_servers_allows_changes_before_bad_value_on_compat_mode(self):
        # In legacy-v2 compat mode the same failure surfaces as a 400.
        params = 'changes-before=asdf'
        req = self.req(self.path_with_query % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        req.set_legacy_v2()
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
    def test_get_servers_allows_changes_since_and_changes_before(self):
        # Both time filters may be combined; each reaches get_all parsed.
        def fake_get_all(context, search_opts=None,
                         limit=None, marker=None,
                         expected_attrs=None, sort_keys=None, sort_dirs=None,
                         cell_down_support=False, all_tenants=False):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 23, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertIn('changes-before', search_opts)
            changes_before = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                               tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertEqual(search_opts['changes-before'], changes_before)
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=uuids.fake)])
        self.mock_get_all.side_effect = fake_get_all
        params = 'changes-since=2011-01-23T17:08:01Z&' \
                 'changes-before=2011-01-24T17:08:01Z'
        req = self.req(self.path_with_query % params)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_filters_with_distinct_changes_time_bad_request(self):
        # changes-since later than changes-before is a 400.
        changes_since = '2018-09-04T05:45:27Z'
        changes_before = '2018-09-03T05:45:27Z'
        query_string = ('changes-since=%s&changes-before=%s' %
                        (changes_since, changes_before))
        req = self.req(self.path_with_query % query_string)
        req.api_version_request = api_version_request.APIVersionRequest('2.66')
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
class ServersControllerTestV271(ControllerTest):
    """From microversion 2.71 the show response carries 'server_groups'."""
    wsgi_api_version = '2.71'
    def req(self, url, use_admin_context=False):
        """Build a request pinned to this class's microversion."""
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)
    def test_show_server_group_not_exist(self):
        """A server belonging to no group reports an empty list."""
        req = self.req(self.path_with_id % FAKE_UUID)
        self.mock_get.side_effect = fakes.fake_compute_get(
            id=2, vm_state=vm_states.ACTIVE,
            project_id=req.environ['nova.context'].project_id)
        body = self.controller.show(req, FAKE_UUID)
        self.assertEqual([], body['server']['server_groups'])
class ServersControllerTestV273(ControllerTest):
    """Server Controller test for microversion 2.73

    The intent here is simply to verify that when showing server details
    after microversion 2.73 the response will also have the locked_reason
    key for the servers.
    """
    wsgi_api_version = '2.73'
    def req(self, url, use_admin_context=False):
        """Build a request pinned to this class's microversion."""
        return fakes.HTTPRequest.blank(url,
                                       use_admin_context=use_admin_context,
                                       version=self.wsgi_api_version)
    def _fake_get_all(self, context, search_opts=None,
                      limit=None, marker=None,
                      expected_attrs=None, sort_keys=None, sort_dirs=None,
                      cell_down_support=False, all_tenants=False):
        """Shared compute get_all stub returning one locked instance.

        Each test below previously carried an identical inline copy of
        this stub; it is factored out so the fake data is defined once.
        """
        db_list = [fakes.stub_instance(
            100, uuid=uuids.fake, locked_by='fake')]
        return instance_obj._make_instance_list(
            context, objects.InstanceList(), db_list, FIELDS)
    def test_get_servers_with_locked_filter(self):
        """locked=true is converted to a boolean search_opts filter."""
        self.mock_get_all.side_effect = self._fake_get_all
        req = self.req(self.path_with_query % 'locked=true')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        search = {'deleted': False, 'project_id': self.project_id,
                  'locked': True}
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'], expected_attrs=[],
            limit=1000, marker=None,
            search_opts=search,
            sort_dirs=['desc'], sort_keys=['created_at'],
            cell_down_support=False, all_tenants=False)
    def test_get_servers_with_locked_filter_invalid_value(self):
        """A non-boolean locked value is rejected with a 400."""
        self.mock_get_all.side_effect = self._fake_get_all
        req = self.req(self.path_with_query % 'locked=price')
        exp = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.index, req)
        self.assertIn("Unrecognized value 'price'", str(exp))
    def test_get_servers_with_locked_filter_empty_value(self):
        """An empty locked value is rejected with a 400."""
        self.mock_get_all.side_effect = self._fake_get_all
        req = self.req(self.path_with_query % 'locked=')
        exp = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.index, req)
        self.assertIn("Unrecognized value ''", str(exp))
    def test_get_servers_with_locked_sort_key(self):
        """'locked' is accepted as a sort key and forwarded to get_all."""
        self.mock_get_all.side_effect = self._fake_get_all
        req = self.req(self.path_with_query %
                       'sort_dir=desc&sort_key=locked')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
        self.mock_get_all.assert_called_once_with(
            req.environ['nova.context'], expected_attrs=[],
            limit=1000, marker=None,
            search_opts={'deleted': False, 'project_id': self.project_id},
            sort_dirs=['desc'], sort_keys=['locked'],
            cell_down_support=False, all_tenants=False)
class ServersControllerTestV275(ControllerTest):
    """Tests at microversion 2.75: unknown query parameters and previously
    ignored sort keys are rejected, and the PUT/rebuild responses include
    the same attributes as GET (driven by GET_ONLY_FIELDS).
    """
    wsgi_api_version = '2.75'
    # Image used by the rebuild request bodies below.
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    @mock.patch('nova.compute.api.API.get_all')
    def test_get_servers_additional_query_param_old_version(self, mock_get):
        # Before 2.75 an unknown query parameter does not raise.
        req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
                                      use_admin_context=True,
                                      version='2.74')
        self.controller.index(req)
    @mock.patch('nova.compute.api.API.get_all')
    def test_get_servers_ignore_sort_key_old_version(self, mock_get):
        # Before 2.75 a sort key from the ignore list does not raise.
        req = fakes.HTTPRequest.blank(
            self.path_with_query % 'sort_key=deleted',
            use_admin_context=True, version='2.74')
        self.controller.index(req)
    def test_get_servers_additional_query_param(self):
        # From 2.75 an unknown query parameter fails schema validation.
        req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
                                      use_admin_context=True,
                                      version=self.wsgi_api_version)
        self.assertRaises(exception.ValidationError, self.controller.index,
                          req)
    def test_get_servers_previously_ignored_sort_key(self):
        # Every sort key that 2.73 silently ignored is now rejected.
        for s_ignore in servers_schema.SERVER_LIST_IGNORE_SORT_KEY_V273:
            req = fakes.HTTPRequest.blank(
                self.path_with_query % 'sort_key=%s' % s_ignore,
                use_admin_context=True,
                version=self.wsgi_api_version)
            self.assertRaises(exception.ValidationError, self.controller.index,
                              req)
    def test_get_servers_additional_sort_key(self):
        req = fakes.HTTPRequest.blank(
            self.path_with_query % 'sort_key=unknown',
            use_admin_context=True, version=self.wsgi_api_version)
        self.assertRaises(exception.ValidationError, self.controller.index,
                          req)
    def test_update_response_no_show_server_only_attributes_old_version(self):
        # There are some old server attributes which were added only for
        # GET server APIs not for PUT. GET server and PUT server share the
        # same view builder method SHOW() to build the response, So make sure
        # attributes which are not supposed to be included for PUT
        # response are not present.
        body = {'server': {'name': 'server_test'}}
        req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
                                      use_admin_context=True,
                                      version='2.74')
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        for field in GET_ONLY_FIELDS:
            self.assertNotIn(field, res_dict['server'])
        for items in res_dict['server']['addresses'].values():
            for item in items:
                self.assertNotIn('OS-EXT-IPS:type', item)
                self.assertNotIn('OS-EXT-IPS-MAC:mac_addr', item)
    def test_update_response_has_show_server_all_attributes(self):
        # From 2.75 the PUT response carries every GET-only field.
        body = {'server': {'name': 'server_test'}}
        req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
                                      use_admin_context=True,
                                      version=self.wsgi_api_version)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        for field in GET_ONLY_FIELDS:
            self.assertIn(field, res_dict['server'])
        for items in res_dict['server']['addresses'].values():
            for item in items:
                self.assertIn('OS-EXT-IPS:type', item)
                self.assertIn('OS-EXT-IPS-MAC:mac_addr', item)
    def test_rebuild_response_no_show_server_only_attributes_old_version(self):
        # There are some old server attributes which were added only for
        # GET server APIs not for Rebuild. GET server and Rebuild server share
        # same view builder method SHOW() to build the response, So make sure
        # the attributes which are not supposed to be included for Rebuild
        # response are not present.
        body = {'rebuild': {"imageRef": self.image_uuid}}
        req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
                                      use_admin_context=True,
                                      version='2.74')
        fake_get = fakes.fake_compute_get(
            vm_state=vm_states.ACTIVE,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        res_dict = self.controller._action_rebuild(req, FAKE_UUID,
                                                   body=body).obj
        # key_name is returned by rebuild even before 2.75, so exclude it.
        get_only_fields_Rebuild = copy.deepcopy(GET_ONLY_FIELDS)
        get_only_fields_Rebuild.remove('key_name')
        for field in get_only_fields_Rebuild:
            self.assertNotIn(field, res_dict['server'])
        for items in res_dict['server']['addresses'].values():
            for item in items:
                self.assertNotIn('OS-EXT-IPS:type', item)
                self.assertNotIn('OS-EXT-IPS-MAC:mac_addr', item)
    def test_rebuild_response_has_show_server_all_attributes(self):
        body = {'rebuild': {"imageRef": self.image_uuid}}
        req = fakes.HTTPRequest.blank(self.path_with_query % 'unknown=1',
                                      use_admin_context=True,
                                      version=self.wsgi_api_version)
        fake_get = fakes.fake_compute_get(
            vm_state=vm_states.ACTIVE,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        res_dict = self.controller._action_rebuild(req, FAKE_UUID,
                                                   body=body).obj
        for field in GET_ONLY_FIELDS:
            # The rebuild response exposes user_data under a plain key
            # rather than the OS-EXT-SRV-ATTR: prefixed one.
            if field == 'OS-EXT-SRV-ATTR:user_data':
                self.assertNotIn(field, res_dict['server'])
                field = 'user_data'
            self.assertIn(field, res_dict['server'])
        for items in res_dict['server']['addresses'].values():
            for item in items:
                self.assertIn('OS-EXT-IPS:type', item)
                self.assertIn('OS-EXT-IPS-MAC:mac_addr', item)
class ServersControllerTestV283(ControllerTest):
    """Tests for the extra filter keys non-admins may use from 2.83."""
    filters = ['availability_zone', 'config_drive', 'key_name',
               'created_at', 'launched_at', 'terminated_at',
               'power_state', 'task_state', 'vm_state', 'progress',
               'user_id']
    def test_get_servers_by_new_filter_for_non_admin(self):
        """At 2.83 every new filter key reaches the compute API."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            for filter_key in self.filters:
                self.assertIn(filter_key, search_opts)
            stub = fakes.stub_instance_obj(100, uuid=uuids.fake)
            return objects.InstanceList(objects=[stub])
        self.mock_get_all.side_effect = fake_get_all
        query_str = '&'.join('%s=test_value' % f for f in self.filters)
        req = fakes.HTTPRequest.blank(self.path_with_query % query_str,
                                      version='2.83')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
    def test_get_servers_new_filters_for_non_admin_old_version(self):
        """Before 2.83 the new filter keys are stripped for non-admins."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            for filter_key in self.filters:
                self.assertNotIn(filter_key, search_opts)
            return objects.InstanceList(objects=[])
        # Without policy edition, test will fail and admin filter will work.
        self.policy.set_rules({'os_compute_api:servers:index': ''})
        self.mock_get_all.side_effect = fake_get_all
        query_str = '&'.join('%s=test_value' % f for f in self.filters)
        req = fakes.HTTPRequest.blank(self.path_with_query % query_str,
                                      version='2.82')
        servers = self.controller.index(req)['servers']
        self.assertEqual(0, len(servers))
    def test_get_servers_by_node_fail_non_admin(self):
        """The 'node' filter stays admin-only even at 2.83."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('node', search_opts)
            stub = fakes.stub_instance_obj(100, uuid=uuids.fake)
            return objects.InstanceList(objects=[stub])
        server_filter_rule = 'os_compute_api:servers:allow_all_filters'
        self.policy.set_rules({'os_compute_api:servers:index': '',
                               server_filter_rule: 'role:admin'})
        self.mock_get_all.side_effect = fake_get_all
        req = fakes.HTTPRequest.blank(self.path_with_query % 'node=node1',
                                      version='2.83')
        servers = self.controller.index(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(uuids.fake, servers[0]['id'])
class ServersControllerDeleteTest(ControllerTest):
    """Tests for DELETE /servers/{id}.

    compute_api.API.delete is stubbed so the tests can observe whether the
    controller actually invoked it (self.server_delete_called).
    """
    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        self.server_delete_called = False
        def fake_delete(api, context, instance):
            # Simulate a vanished instance for the sentinel UUID; record
            # the call for every other instance.
            if instance.uuid == uuids.non_existent_uuid:
                raise exception.InstanceNotFound(instance_id=instance.uuid)
            self.server_delete_called = True
        self.stub_out('nova.compute.api.API.delete', fake_delete)
    def _create_delete_request(self, uuid):
        # Build a DELETE request and stub the instance lookup so the
        # controller sees an ACTIVE server owned by the requester.
        fakes.stub_out_instance_quota(self, 0, 10)
        req = fakes.HTTPRequestV21.blank(self.path_with_id % uuid)
        req.method = 'DELETE'
        fake_get = fakes.fake_compute_get(
            uuid=uuid,
            vm_state=vm_states.ACTIVE,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        return req
    def _delete_server_instance(self, uuid=FAKE_UUID):
        req = self._create_delete_request(uuid)
        self.controller.delete(req, uuid)
    def test_delete_server_instance(self):
        self._delete_server_instance()
        self.assertTrue(self.server_delete_called)
    def test_delete_server_instance_not_found(self):
        # InstanceNotFound from the compute API surfaces as a 404.
        self.assertRaises(webob.exc.HTTPNotFound,
                          self._delete_server_instance,
                          uuid=uuids.non_existent_uuid)
    def test_delete_server_instance_while_building(self):
        req = self._create_delete_request(FAKE_UUID)
        self.controller.delete(req, FAKE_UUID)
        self.assertTrue(self.server_delete_called)
    @mock.patch.object(compute_api.API, 'delete',
                       side_effect=exception.InstanceIsLocked(
                           instance_uuid=FAKE_UUID))
    def test_delete_locked_server(self, mock_delete):
        # Deleting a locked server is a 409 conflict.
        req = self._create_delete_request(FAKE_UUID)
        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          req, FAKE_UUID)
        mock_delete.assert_called_once_with(
            req.environ['nova.context'], test.MatchType(objects.Instance))
    def test_delete_server_instance_while_resize(self):
        # A server mid-resize can still be deleted.
        req = self._create_delete_request(FAKE_UUID)
        fake_get = fakes.fake_compute_get(
            vm_state=vm_states.ACTIVE,
            task_state=task_states.RESIZE_PREP,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        self.controller.delete(req, FAKE_UUID)
    def test_delete_server_instance_if_not_launched(self):
        self.flags(reclaim_instance_interval=3600)
        req = fakes.HTTPRequestV21.blank(self.path_with_id % FAKE_UUID)
        req.method = 'DELETE'
        self.server_delete_called = False
        # launched_at=None marks an instance that was never active.
        fake_get = fakes.fake_compute_get(
            launched_at=None,
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        def instance_destroy_mock(*args, **kwargs):
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stub_out('nova.db.api.instance_destroy', instance_destroy_mock)
        self.controller.delete(req, FAKE_UUID)
        # delete() should be called for instance which has never been active,
        # even if reclaim_instance_interval has been set.
        self.assertTrue(self.server_delete_called)
class ServersControllerRebuildInstanceTest(ControllerTest):
    """Tests for the server rebuild action (base microversion) plus the
    start/stop server actions.
    """
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    # Overridden (True) by subclasses testing microversions where the
    # rebuild response includes key_name (>= 2.54).
    expected_key_name = False
    def setUp(self):
        """Prepare a POST /action request, stub instance lookups, and
        build the default rebuild body used by most tests.
        """
        super(ServersControllerRebuildInstanceTest, self).setUp()
        self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID)
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.useFixture(nova_fixtures.SingleCellSimple())
        def fake_get(ctrl, ctxt, uuid):
            # 'test_inst' is used by tests that want a 404 from the
            # controller's own instance lookup.
            if uuid == 'test_inst':
                raise webob.exc.HTTPNotFound(explanation='fakeout')
            return fakes.stub_instance_obj(None,
                                           vm_state=vm_states.ACTIVE,
                                           project_id=self.req_project_id,
                                           user_id=self.req_user_id)
        self.useFixture(
            fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
                                 'ServersController._get_instance',
                                 fake_get))
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_uuid,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
    def test_rebuild_server_with_image_not_uuid(self):
        self.body['rebuild']['imageRef'] = 'not-uuid'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID,
                          body=self.body)
    def test_rebuild_server_with_image_as_full_url(self):
        image_href = (
            'http://localhost/v2/%s/images/'
            '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
        self.body['rebuild']['imageRef'] = image_href
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID,
                          body=self.body)
    def test_rebuild_server_with_image_as_empty_string(self):
        self.body['rebuild']['imageRef'] = ''
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID,
                          body=self.body)
    def test_rebuild_instance_name_with_spaces_in_the_middle(self):
        self.body['rebuild']['name'] = 'abc   def'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller._action_rebuild(self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_name_with_leading_trailing_spaces(self):
        self.body['rebuild']['name'] = '  abc   def  '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_name_with_leading_trailing_spaces_compat_mode(
            self):
        # Legacy v2 compat mode strips leading/trailing whitespace rather
        # than rejecting the name.
        self.body['rebuild']['name'] = '  abc  def  '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.set_legacy_v2()
        def fake_rebuild(*args, **kwargs):
            self.assertEqual('abc  def', kwargs['display_name'])
        with mock.patch.object(compute_api.API, 'rebuild') as mock_rebuild:
            mock_rebuild.side_effect = fake_rebuild
            self.controller._action_rebuild(self.req, FAKE_UUID,
                                            body=self.body)
    def test_rebuild_instance_with_blank_metadata_key(self):
        self.body['rebuild']['metadata'][''] = 'world'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_key_too_long(self):
        self.body['rebuild']['metadata'][('a' * 260)] = 'world'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_value_too_long(self):
        self.body['rebuild']['metadata']['key1'] = ('a' * 260)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_value_not_string(self):
        self.body['rebuild']['metadata']['key1'] = 1
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    @mock.patch.object(nova_fixtures.GlanceFixture, 'show',
                       return_value=dict(
                           id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                           name='public image', is_public=True,
                           status='active', properties={'key1': 'value1'},
                           min_ram="4096", min_disk="10"))
    def test_rebuild_instance_fails_when_min_ram_too_small(self, mock_show):
        # make min_ram larger than our instance ram size
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
        mock_show.assert_called_once_with(
            self.req.environ['nova.context'], self.image_uuid,
            include_locations=False, show_deleted=True)
    @mock.patch.object(nova_fixtures.GlanceFixture, 'show',
                       return_value=dict(
                           id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                           name='public image', is_public=True,
                           status='active', properties={'key1': 'value1'},
                           min_ram="128", min_disk="100000"))
    def test_rebuild_instance_fails_when_min_disk_too_small(self, mock_show):
        # make min_disk larger than our instance disk size
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
        mock_show.assert_called_once_with(
            self.req.environ['nova.context'], self.image_uuid,
            include_locations=False, show_deleted=True)
    @mock.patch.object(nova_fixtures.GlanceFixture, 'show',
                       return_value=dict(
                           id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                           name='public image', is_public=True,
                           status='active', size=str(1000 * (1024 ** 3))))
    def test_rebuild_instance_image_too_large(self, mock_show):
        # make image size larger than our instance disk size
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
        mock_show.assert_called_once_with(
            self.req.environ['nova.context'], self.image_uuid,
            include_locations=False, show_deleted=True)
    def test_rebuild_instance_name_all_blank(self):
        self.body['rebuild']['name'] = '     '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    @mock.patch.object(nova_fixtures.GlanceFixture, 'show',
                       return_value=dict(
                           id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                           name='public image', is_public=True,
                           status='DELETED'))
    def test_rebuild_instance_with_deleted_image(self, mock_show):
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
        mock_show.assert_called_once_with(
            self.req.environ['nova.context'], self.image_uuid,
            include_locations=False, show_deleted=True)
    def test_rebuild_instance_onset_file_limit_over_quota(self):
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')
        with test.nested(
            mock.patch.object(nova_fixtures.GlanceFixture, 'show',
                              side_effect=fake_get_image),
            mock.patch.object(self.controller.compute_api, 'rebuild',
                              side_effect=exception.OnsetFileLimitExceeded)
        ) as (
            show_mock, rebuild_mock
        ):
            self.req.body = jsonutils.dump_as_bytes(self.body)
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller._action_rebuild,
                              self.req, FAKE_UUID, body=self.body)
    def test_rebuild_bad_personality(self):
        # Personality files have been deprecated as of v2.57
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.56')
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": "INVALID b64",
                }]
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_personality(self):
        # Personality files have been deprecated as of v2.57
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.56')
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": base64.encode_as_text("Test String"),
                }]
            },
        }
        body = self.controller._action_rebuild(self.req, FAKE_UUID,
                                               body=body).obj
        self.assertNotIn('personality', body['server'])
    def test_rebuild_response_has_no_show_server_only_attributes(self):
        # There are some old server attributes which were added only for
        # GET server APIs not for rebuild. GET server and Rebuild share the
        # same view builder method SHOW() to build the response, So make sure
        # attributes which are not supposed to be included for Rebuild
        # response are not present.
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
            },
        }
        body = self.controller._action_rebuild(self.req, FAKE_UUID,
                                               body=body).obj
        get_only_fields = copy.deepcopy(GET_ONLY_FIELDS)
        if self.expected_key_name:
            get_only_fields.remove('key_name')
        for field in get_only_fields:
            self.assertNotIn(field, body['server'])
    @mock.patch.object(compute_api.API, 'start')
    def test_start(self, mock_start):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(start="")
        self.controller._start_server(req, FAKE_UUID, body)
        mock_start.assert_called_once_with(mock.ANY, mock.ANY)
    @mock.patch.object(compute_api.API, 'start', fake_start_stop_not_ready)
    def test_start_not_ready(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)
    @mock.patch.object(
        compute_api.API, 'start', fakes.fake_actions_to_locked_server)
    def test_start_locked_server(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'start', fake_start_stop_invalid_state)
    def test_start_invalid(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'stop')
    def test_stop(self, mock_stop):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(stop="")
        self.controller._stop_server(req, FAKE_UUID, body)
        mock_stop.assert_called_once_with(mock.ANY, mock.ANY)
    @mock.patch.object(compute_api.API, 'stop', fake_start_stop_not_ready)
    def test_stop_not_ready(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)
    @mock.patch.object(
        compute_api.API, 'stop', fakes.fake_actions_to_locked_server)
    def test_stop_locked_server(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)
    @mock.patch.object(compute_api.API, 'stop', fake_start_stop_invalid_state)
    def test_stop_invalid_state(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % FAKE_UUID)
        # NOTE(review): body says 'start' although this exercises stop --
        # presumably harmless because the mocked stop raises before the
        # body key matters; confirm whether this should be dict(stop="").
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)
    @mock.patch(
        'nova.db.api.instance_get_by_uuid',
        fake_instance_get_by_uuid_not_found)
    def test_start_with_bogus_id(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % 'test_inst')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._start_server, req, 'test_inst', body)
    @mock.patch(
        'nova.db.api.instance_get_by_uuid',
        fake_instance_get_by_uuid_not_found)
    def test_stop_with_bogus_id(self):
        req = fakes.HTTPRequestV21.blank(self.path_action % 'test_inst')
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._stop_server, req, 'test_inst', body)
class ServersControllerRebuildTestV254(ServersControllerRebuildInstanceTest):
    """Tests server rebuild at microversion 2.54, where a key_name may be
    supplied (and is returned in the response).
    """
    expected_key_name = True
    def setUp(self):
        super(ServersControllerRebuildTestV254, self).setUp()
        fakes.stub_out_key_pair_funcs(self)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.54')
    def _test_set_key_name_rebuild(self, set_key_name=True):
        """Rebuild and verify the response key_name; optionally include
        key_name in the request body.
        """
        key_name = "key"
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=key_name,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        if set_key_name:
            self.body['rebuild']['key_name'] = key_name
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID,
            body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['key_name'], key_name)
    def test_rebuild_accepted_with_keypair_name(self):
        self._test_set_key_name_rebuild()
    def test_rebuild_key_not_changed(self):
        self._test_set_key_name_rebuild(set_key_name=False)
    def test_rebuild_invalid_microversion_253(self):
        # key_name is not allowed before 2.54.
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.53')
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "key"
            },
        }
        excpt = self.assertRaises(exception.ValidationError,
                                  self.controller._action_rebuild,
                                  self.req, FAKE_UUID, body=body)
        self.assertIn('key_name', str(excpt))
    def test_rebuild_with_not_existed_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "nonexistentkey"
            },
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_user_has_no_key_pair(self):
        def no_key_pair(context, user_id, name):
            raise exception.KeypairNotFound(user_id=user_id, name=name)
        self.stub_out('nova.db.api.key_pair_get', no_key_pair)
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=None,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        self.body['rebuild']['key_name'] = "a-key-name"
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_with_non_string_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": 12345
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_with_invalid_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": "123\0d456"
            },
        }
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_with_empty_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": ''
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
    def test_rebuild_with_none_keypair_name(self):
        key_name = None
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          key_name=key_name,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        with mock.patch.object(objects.KeyPair, 'get_by_name') as key_get:
            self.body['rebuild']['key_name'] = key_name
            self.req.body = jsonutils.dump_as_bytes(self.body)
            self.controller._action_rebuild(
                self.req, FAKE_UUID,
                body=self.body)
            # NOTE: because the api will call _get_server twice. The server
            # response will always be the same one. So we just use
            # objects.KeyPair.get_by_name to verify test.
            key_get.assert_not_called()
    def test_rebuild_with_too_large_keypair_name(self):
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "key_name": 256 * "k"
            },
        }
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=body)
class ServersControllerRebuildTestV257(ServersControllerRebuildTestV254):
    """Tests server rebuild at microversion 2.57 where user_data can be
    provided and personality files are no longer accepted.
    """
    def setUp(self):
        super(ServersControllerRebuildTestV257, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.57')
    def test_rebuild_personality(self):
        """Tests that trying to rebuild with personality files fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "personality": [{
                    "path": "/path/to/file",
                    "contents": base64.encode_as_text("Test String"),
                }]
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('personality', str(ex))
    def test_rebuild_user_data_old_version(self):
        """Tests that trying to rebuild with user_data before 2.57 fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": "ZWNobyAiaGVsbG8gd29ybGQi"
            }
        }
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.55')
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', str(ex))
    def test_rebuild_user_data_malformed(self):
        """Tests that trying to rebuild with malformed user_data fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": b'invalid'
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', str(ex))
    def test_rebuild_user_data_too_large(self):
        """Tests that passing user_data to rebuild that is too large fails."""
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": ('MQ==' * 16384)
            }
        }
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=body)
        self.assertIn('user_data', str(ex))
    # context.can is mocked out so any policy check passes unconditionally.
    @mock.patch.object(context.RequestContext, 'can')
    @mock.patch('nova.db.api.instance_update_and_get_original')
    def test_rebuild_reset_user_data(self, mock_update, mock_policy):
        """Tests that passing user_data=None resets the user_data on the
        instance.
        """
        body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": None
            }
        }
        self.mock_get.side_effect = None
        self.mock_get.return_value = fakes.stub_instance_obj(
            context.RequestContext(self.req_user_id, self.req_project_id),
            user_data='ZWNobyAiaGVsbG8gd29ybGQi')
        def fake_instance_update_and_get_original(
                ctxt, instance_uuid, values, **kwargs):
            # save() is called twice and the second one has system_metadata
            # in the updates, so we can ignore that one.
            if 'system_metadata' not in values:
                self.assertIn('user_data', values)
                self.assertIsNone(values['user_data'])
            return instance_update_and_get_original(
                ctxt, instance_uuid, values, **kwargs)
        mock_update.side_effect = fake_instance_update_and_get_original
        self.controller._action_rebuild(self.req, FAKE_UUID, body=body)
        self.assertEqual(2, mock_update.call_count)
class ServersControllerRebuildTestV219(ServersControllerRebuildInstanceTest):
    """Tests server rebuild at microversion 2.19, where a description may
    be supplied (and is returned in the response).
    """
    def setUp(self):
        super(ServersControllerRebuildTestV219, self).setUp()
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.19')
    def _rebuild_server(self, set_desc, desc):
        """Rebuild and verify the response description; optionally include
        the description in the request body.
        """
        fake_get = fakes.fake_compute_get(vm_state=vm_states.ACTIVE,
                                          display_description=desc,
                                          project_id=self.req_project_id,
                                          user_id=self.req_user_id)
        self.mock_get.side_effect = fake_get
        if set_desc:
            self.body['rebuild']['description'] = desc
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(self.req, FAKE_UUID,
                                                 body=self.body).obj['server']
        self.assertEqual(server['id'], FAKE_UUID)
        self.assertEqual(server['description'], desc)
    def test_rebuild_server_with_description(self):
        self._rebuild_server(True, 'server desc')
    def test_rebuild_server_empty_description(self):
        self._rebuild_server(True, '')
    def test_rebuild_server_without_description(self):
        self._rebuild_server(False, '')
    def test_rebuild_server_remove_description(self):
        self._rebuild_server(True, None)
    def test_rebuild_server_description_too_long(self):
        self.body['rebuild']['description'] = 'x' * 256
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        self.body['rebuild']['description'] = "123\0d456"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
# NOTE(jaypipes): Not derived from ServersControllerRebuildInstanceTest
# because that test case's setUp is broken for our purposes here.
class ServersControllerRebuildTestV263(ControllerTest):
    """Tests server rebuild at microversion 2.63, where
    trusted_image_certificates may be supplied.
    """
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    def setUp(self):
        super(ServersControllerRebuildTestV263, self).setUp()
        self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID)
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.63')
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_uuid,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
    @mock.patch('nova.compute.api.API.get')
    def _rebuild_server(self, mock_get, certs=None,
                        conf_enabled=True, conf_certs=None):
        """Rebuild with optional trusted certs in the request body
        (certs), glance signature/cert validation toggled (conf_enabled),
        and optional configured default cert IDs (conf_certs); then verify
        the trusted_image_certificates in the response.
        """
        fakes.stub_out_trusted_certs(self, certs=certs)
        ctx = self.req.environ['nova.context']
        mock_get.return_value = fakes.stub_instance_obj(ctx,
            vm_state=vm_states.ACTIVE, trusted_certs=certs,
            project_id=self.req_project_id, user_id=self.req_user_id)
        self.flags(default_trusted_certificate_ids=conf_certs, group='glance')
        if conf_enabled:
            self.flags(verify_glance_signatures=True, group='glance')
            self.flags(enable_certificate_validation=True, group='glance')
        self.body['rebuild']['trusted_image_certificates'] = certs
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID, body=self.body).obj['server']
        if certs:
            self.assertEqual(certs, server['trusted_image_certificates'])
        else:
            if conf_enabled:
                # configuration file default is used
                self.assertEqual(
                    conf_certs, server['trusted_image_certificates'])
            else:
                # either not set or empty
                self.assertIsNone(server['trusted_image_certificates'])
    def test_rebuild_server_with_trusted_certs(self):
        """Test rebuild with valid trusted_image_certificates argument"""
        self._rebuild_server(
            certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
                   '674736e3-f25c-405c-8362-bbf991e0ce0a'])
    def test_rebuild_server_without_trusted_certs(self):
        """Test rebuild without trusted image certificates"""
        self._rebuild_server()
    def test_rebuild_server_conf_options_turned_off_set(self):
        """Test rebuild with feature disabled and certs specified"""
        self._rebuild_server(
            certs=['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8'], conf_enabled=False)
    def test_rebuild_server_conf_options_turned_off_empty(self):
        """Test rebuild with feature disabled"""
        self._rebuild_server(conf_enabled=False)
    def test_rebuild_server_default_trusted_certificates_empty(self):
        """Test rebuild with feature enabled and no certs specified"""
        self._rebuild_server(conf_enabled=True)
    def test_rebuild_server_default_trusted_certificates(self):
        """Test rebuild with certificate specified in configurations"""
        self._rebuild_server(conf_enabled=True, conf_certs=['conf-id'])
    def test_rebuild_server_with_empty_trusted_cert_id(self):
        """Make sure that we can't rebuild with an empty certificate ID"""
        self.body['rebuild']['trusted_image_certificates'] = ['']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too short', str(ex))
    def test_rebuild_server_with_empty_trusted_certs(self):
        """Make sure that we can't rebuild with an empty array of IDs"""
        self.body['rebuild']['trusted_image_certificates'] = []
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too short', str(ex))
    def test_rebuild_server_with_too_many_trusted_certs(self):
        """Make sure that we can't rebuild with an array of >50 unique IDs"""
        self.body['rebuild']['trusted_image_certificates'] = [
            'cert{}'.format(i) for i in range(51)]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is too long', str(ex))
    def test_rebuild_server_with_nonunique_trusted_certs(self):
        """Make sure that we can't rebuild with a non-unique array of IDs"""
        self.body['rebuild']['trusted_image_certificates'] = ['cert', 'cert']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('has non-unique elements', str(ex))
    def test_rebuild_server_with_invalid_trusted_cert_id(self):
        """Make sure that we can't rebuild with non-string certificate IDs"""
        self.body['rebuild']['trusted_image_certificates'] = [1, 2]
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is not of type', str(ex))
    def test_rebuild_server_with_invalid_trusted_certs(self):
        """Make sure that we can't rebuild with certificates in a non-array"""
        self.body['rebuild']['trusted_image_certificates'] = "not-an-array"
        self.req.body = jsonutils.dump_as_bytes(self.body)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('is not of type', str(ex))
    def test_rebuild_server_with_trusted_certs_pre_2_63_fails(self):
        """Make sure we can't use trusted_certs before 2.63"""
        self._rebuild_server(certs=['trusted-cert-id'])
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.62')
        ex = self.assertRaises(exception.ValidationError,
                               self.controller._action_rebuild,
                               self.req, FAKE_UUID, body=self.body)
        self.assertIn('Additional properties are not allowed', str(ex))
    @mock.patch.object(compute_api.API, 'rebuild')
    def test_rebuild_server_with_cert_validation_error(
            self, mock_rebuild):
        mock_rebuild.side_effect = exception.CertificateValidationFailed(
            cert_uuid="cert id", reason="test cert validation error")
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._rebuild_server,
                               certs=['trusted-cert-id'])
        self.assertIn('test cert validation error', str(ex))
class ServersControllerRebuildTestV271(ControllerTest):
    """Tests server rebuild at microversion 2.71, where the response
    includes server_groups.
    """
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    def setUp(self):
        super(ServersControllerRebuildTestV271, self).setUp()
        self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID,
                                            use_admin_context=True)
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        self.req_user_id = self.req.environ['nova.context'].user_id
        self.req_project_id = self.req.environ['nova.context'].project_id
        self.req.api_version_request = (api_version_request.
                                        APIVersionRequest('2.71'))
        self.body = {
            "rebuild": {
                "imageRef": self.image_uuid,
                "user_data": None
            }
        }
    @mock.patch('nova.compute.api.API.get')
    def _rebuild_server(self, mock_get):
        """Run the rebuild action and return the server response dict."""
        ctx = self.req.environ['nova.context']
        mock_get.return_value = fakes.stub_instance_obj(ctx,
            vm_state=vm_states.ACTIVE, project_id=self.req_project_id,
            user_id=self.req_user_id)
        server = self.controller._action_rebuild(
            self.req, FAKE_UUID, body=self.body).obj['server']
        return server
    @mock.patch.object(InstanceGroup, 'get_by_instance_uuid',
            side_effect=exception.InstanceGroupNotFound(group_uuid=FAKE_UUID))
    def test_rebuild_with_server_group_not_exist(self, mock_sg_get):
        # A missing server group results in an empty list, not an error.
        server = self._rebuild_server()
        self.assertEqual([], server['server_groups'])
class ServersControllerUpdateTest(ControllerTest):
    """Tests for PUT /servers/{id} (server update)."""
    def _get_request(self, body=None):
        """Build a PUT request with the given JSON body and stub the
        instance lookup to return a matching instance.
        """
        req = fakes.HTTPRequestV21.blank(self.path_with_id % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dump_as_bytes(body)
        fake_get = fakes.fake_compute_get(
            project_id=req.environ['nova.context'].project_id,
            user_id=req.environ['nova.context'].user_id)
        self.mock_get.side_effect = fake_get
        return req
    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
               }}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_name(self):
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_response_has_no_show_server_only_attributes(self):
        # There are some old server attributes which were added only for
        # GET server APIs not for PUT. GET server and PUT server share the
        # same view builder method SHOW() to build the response, So make sure
        # attributes which are not supposed to be included for PUT
        # response are not present.
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        for field in GET_ONLY_FIELDS:
            self.assertNotIn(field, res_dict['server'])
    def test_update_server_name_too_long(self):
        body = {'server': {'name': 'x' * 256}}
        req = self._get_request(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_name_all_blank_spaces(self):
        self.stub_out('nova.db.api.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' ' * 64}}
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_name_with_spaces_in_the_middle(self):
        body = {'server': {'name': 'abc   def'}}
        req = self._get_request(body)
        self.controller.update(req, FAKE_UUID, body=body)
    def test_update_server_name_with_leading_trailing_spaces(self):
        self.stub_out('nova.db.api.instance_get',
                      fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': '  abc   def  '}}
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError,
                          self.controller.update, req, FAKE_UUID, body=body)
    def test_update_server_name_with_leading_trailing_spaces_compat_mode(self):
        # Legacy v2 compat mode strips the whitespace instead of rejecting.
        body = {'server': {'name': '  abc   def  '}}
        req = self._get_request(body)
        req.set_legacy_v2()
        self.controller.update(req, FAKE_UUID, body=body)
    def test_update_server_admin_password_extra_arg(self):
        inst_dict = dict(name='server_test', admin_password='bacon')
        body = dict(server=inst_dict)
        req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_host_id(self):
        inst_dict = dict(host_id='123')
        body = dict(server=inst_dict)
        req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_not_found(self):
        self.mock_get.side_effect = exception.InstanceNotFound(
            instance_id='fake')
        body = {'server': {'name': 'server_test'}}
        req = fakes.HTTPRequest.blank(self.path_with_id % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dump_as_bytes(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)
    @mock.patch.object(compute_api.API, 'update_instance')
    def test_update_server_not_found_on_update(self, mock_update_instance):
        # InstanceNotFound raised during the update itself also maps to 404.
        def fake_update(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        mock_update_instance.side_effect = fake_update
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)
class ServersControllerTriggerCrashDumpTest(ControllerTest):
    """Tests for the trigger_crash_dump server action (microversion 2.17)."""
    def setUp(self):
        super(ServersControllerTriggerCrashDumpTest, self).setUp()
        # One ACTIVE instance shared by every test in this class.
        self.instance = fakes.stub_instance_obj(None,
                                                vm_state=vm_states.ACTIVE,
                                                project_id=self.project_id)
        def fake_get(ctrl, ctxt, uuid):
            # Only FAKE_UUID resolves; any other id behaves like a missing
            # instance.
            if uuid != FAKE_UUID:
                raise webob.exc.HTTPNotFound(explanation='fakeout')
            return self.instance
        self.useFixture(
            fixtures.MonkeyPatch('nova.api.openstack.compute.servers.'
                                 'ServersController._get_instance',
                                 fake_get))
        # Action request pinned to 2.17, where trigger_crash_dump appeared.
        self.req = fakes.HTTPRequest.blank(self.path_action % FAKE_UUID)
        self.req.api_version_request =\
            api_version_request.APIVersionRequest('2.17')
        self.body = dict(trigger_crash_dump=None)
    @mock.patch.object(compute_api.API, 'trigger_crash_dump')
    def test_trigger_crash_dump(self, mock_trigger_crash_dump):
        """Happy path: the compute API is called with ctxt and instance."""
        ctxt = self.req.environ['nova.context']
        self.controller._action_trigger_crash_dump(self.req, FAKE_UUID,
                                                   body=self.body)
        mock_trigger_crash_dump.assert_called_with(ctxt, self.instance)
    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fake_start_stop_not_ready)
    def test_trigger_crash_dump_not_ready(self):
        """InstanceNotReady from compute maps to HTTP 409."""
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)
    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fakes.fake_actions_to_locked_server)
    def test_trigger_crash_dump_locked_server(self):
        """A locked server maps to HTTP 409."""
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)
    @mock.patch.object(compute_api.API, 'trigger_crash_dump',
                       fake_start_stop_invalid_state)
    def test_trigger_crash_dump_invalid_state(self):
        """An invalid vm_state maps to HTTP 409."""
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)
    def test_trigger_crash_dump_with_bogus_id(self):
        """An unknown server id maps to HTTP 404 (via fake_get above)."""
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_trigger_crash_dump,
                          self.req, 'test_inst', body=self.body)
    def test_trigger_crash_dump_schema_invalid_type(self):
        """The action body must be null; anything else fails validation."""
        self.body['trigger_crash_dump'] = 'not null'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)
    def test_trigger_crash_dump_schema_extra_property(self):
        """Extra top-level properties in the action body fail validation."""
        self.body['extra_property'] = 'extra'
        self.assertRaises(exception.ValidationError,
                          self.controller._action_trigger_crash_dump,
                          self.req, FAKE_UUID, body=self.body)
class ServersControllerUpdateTestV219(ServersControllerUpdateTest):
    """Re-runs the update tests at microversion 2.19 (server description)."""
    def _get_request(self, body=None):
        # Same request as the base class, pinned to 2.19.
        req = super(ServersControllerUpdateTestV219, self)._get_request(
            body=body)
        req.api_version_request = api_version_request.APIVersionRequest('2.19')
        return req
    def _update_server_desc(self, set_desc, desc=None):
        """Send an update that optionally sets 'description' to *desc*.

        When set_desc is True and desc is None the description is explicitly
        nulled out, which is how 2.19 removes a description.
        """
        body = {'server': {}}
        if set_desc:
            body['server']['description'] = desc
        req = self._get_request()
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        return res_dict
    def test_update_server_description(self):
        res_dict = self._update_server_desc(True, 'server_desc')
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['description'], 'server_desc')
    def test_update_server_empty_description(self):
        # Empty string is a valid (distinct-from-null) description.
        res_dict = self._update_server_desc(True, '')
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['description'], '')
    def test_update_server_without_description(self):
        res_dict = self._update_server_desc(False)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertIsNone(res_dict['server']['description'])
    def test_update_server_remove_description(self):
        # Explicit null clears any existing description.
        res_dict = self._update_server_desc(True)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertIsNone(res_dict['server']['description'])
    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
                  'description': 'server_desc'
               }}
        req = self._get_request(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
        self.assertEqual(res_dict['server']['description'], 'server_desc')
    def test_update_server_description_too_long(self):
        # Schema caps descriptions at 255 characters.
        body = {'server': {'description': 'x' * 256}}
        req = self._get_request(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_description_invalid(self):
        # Invalid non-printable control char in the desc.
        body = {'server': {'description': "123\0d456"}}
        req = self._get_request(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
class ServersControllerUpdateTestV271(ServersControllerUpdateTest):
    """Re-runs the update tests at microversion 2.71 (server_groups field)."""
    # Minimal rename body shared by tests in this class.
    body = {'server': {'name': 'server_test'}}
    def _get_request(self, body=None):
        # Same request as the base class, pinned to 2.71.
        req = super(ServersControllerUpdateTestV271, self)._get_request(
            body=body)
        req.api_version_request = api_version_request.APIVersionRequest('2.71')
        return req
    @mock.patch.object(InstanceGroup, 'get_by_instance_uuid',
            side_effect=exception.InstanceGroupNotFound(group_uuid=FAKE_UUID))
    def test_update_with_server_group_not_exist(self, mock_sg_get):
        # A missing server group degrades to an empty list, not an error.
        req = self._get_request(self.body)
        res_dict = self.controller.update(req, FAKE_UUID, body=self.body)
        self.assertEqual([], res_dict['server']['server_groups'])
class ServerStatusTest(test.TestCase):
    """Verifies the mapping of (vm_state, task_state) to the API 'status'."""
    project_id = fakes.FAKE_PROJECT_ID
    path = '/%s/servers' % project_id
    path_with_id = path + '/%s'
    path_action = path + '/%s/action'
    def setUp(self):
        super(ServerStatusTest, self).setUp()
        fakes.stub_out_nw_api(self)
        fakes.stub_out_secgroup_api(
            self, security_groups=[{'name': 'default'}])
        self.controller = servers.ServersController()
    def _get_with_state(self, vm_state, task_state=None):
        """Show FAKE_UUID with the given states and return the response."""
        request = fakes.HTTPRequestV21.blank(self.path_with_id % FAKE_UUID)
        self.stub_out('nova.compute.api.API.get',
                      fakes.fake_compute_get(
                          vm_state=vm_state,
                          task_state=task_state,
                          project_id=request.environ['nova.context'].project_id))
        return self.controller.show(request, FAKE_UUID)
    def test_active(self):
        response = self._get_with_state(vm_states.ACTIVE)
        self.assertEqual(response['server']['status'], 'ACTIVE')
    def test_reboot(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING)
        self.assertEqual(response['server']['status'], 'REBOOT')
    def test_reboot_hard(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING_HARD)
        self.assertEqual(response['server']['status'], 'HARD_REBOOT')
    def test_rebuild(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBUILDING)
        self.assertEqual(response['server']['status'], 'REBUILD')
    def test_rebuild_error(self):
        response = self._get_with_state(vm_states.ERROR)
        self.assertEqual(response['server']['status'], 'ERROR')
    def test_resize(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.RESIZE_PREP)
        self.assertEqual(response['server']['status'], 'RESIZE')
    def test_verify_resize(self):
        response = self._get_with_state(vm_states.RESIZED, None)
        self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')
    def test_revert_resize(self):
        response = self._get_with_state(vm_states.RESIZED,
                                        task_states.RESIZE_REVERTING)
        self.assertEqual(response['server']['status'], 'REVERT_RESIZE')
    def test_password_update(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.UPDATING_PASSWORD)
        self.assertEqual(response['server']['status'], 'PASSWORD')
    def test_stopped(self):
        response = self._get_with_state(vm_states.STOPPED)
        self.assertEqual(response['server']['status'], 'SHUTOFF')
class ServersControllerCreateTest(test.TestCase):
    """Tests for POST /servers (server create) through the controller."""
    # Image/flavor defaults shared by the request-body fixtures in setUp().
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'
    project_id = fakes.FAKE_PROJECT_ID
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTest, self).setUp()
        self.flags(enable_instance_password=True, group='api')
        fakes.stub_out_nw_api(self)
        fakes.stub_out_key_pair_funcs(self)
        self.controller = servers.ServersController()
        self.useFixture(nova_fixtures.GlanceFixture(self))
        # Canonical create body; most tests mutate this in place.
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
                'networks': [{
                    'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
                }],
            },
        }
        # v2 block-device-mapping fixture (boot-from-volume style).
        self.bdm_v2 = [{
            'no_device': None,
            'source_type': 'volume',
            'destination_type': 'volume',
            'uuid': 'fake',
            'device_name': 'vdb',
            'delete_on_termination': False,
        }]
        # Legacy (v1) block-device-mapping fixture.
        self.bdm = [{
            'no_device': None,
            'virtual_name': 'root',
            'volume_id': fakes.FAKE_UUID,
            'device_name': 'vda',
            'delete_on_termination': False
        }]
        # Pre-built POST request; note its body is a *separate* minimal
        # payload — tests that mutate self.body re-serialize it themselves.
        self.req = fakes.HTTPRequest.blank('/%s/servers' % self.project_id)
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
        server = dict(name='server_test', imageRef=FAKE_UUID, flavorRef=2)
        body = {'server': server}
        self.req.body = encodeutils.safe_encode(jsonutils.dumps(body))
def _check_admin_password_len(self, server_dict):
"""utility function - check server_dict for admin_password length."""
self.assertEqual(CONF.password_length, len(server_dict["adminPass"]))
def _check_admin_password_missing(self, server_dict):
"""utility function - check server_dict for admin_password absence."""
self.assertNotIn("adminPass", server_dict)
def _test_create_instance(self, flavor=2):
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
self.body['server']['imageRef'] = image_uuid
self.body['server']['flavorRef'] = flavor
self.req.body = jsonutils.dump_as_bytes(self.body)
server = self.controller.create(self.req, body=self.body).obj['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
    def test_create_instance_with_none_value_port(self):
        """A network entry with an explicit null port is accepted."""
        self.body['server'] = {'networks': [{'port': None, 'uuid': FAKE_UUID}]}
        self.body['server']['name'] = 'test'
        self._test_create_instance()
    def test_create_instance_private_flavor(self):
        """Booting a private flavor the tenant cannot see returns 400."""
        values = {
            'name': 'fake_name',
            'memory': 512,
            'vcpus': 1,
            'root_gb': 10,
            'ephemeral_gb': 10,
            'flavorid': '1324',
            'swap': 0,
            'rxtx_factor': 0.5,
            'is_public': False,
        }
        flavors.create(**values)
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._test_create_instance,
                               flavor=1324)
        self.assertEqual('Flavor 1324 could not be found.', str(ex))
    def test_create_server_bad_image_uuid(self):
        """A non-string imageRef fails schema validation."""
        self.body['server']['min_count'] = 1
        # NOTE(review): the trailing comma makes this a tuple (1,), which is
        # presumably the intended invalid value — confirm before "fixing".
        self.body['server']['imageRef'] = 1,
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_server_with_deleted_image(self):
        """Booting a DELETED image returns 400 with a clear message."""
        # Get the fake image service so we can set the status to deleted
        (image_service, image_id) = glance.get_remote_image_service(
            context, '')
        image_service.update(context, self.image_uuid, {'status': 'DELETED'})
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'status': 'active'})
        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dump_as_bytes(self.body)
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
            self.controller.create(self.req, body=self.body)
    def test_create_server_image_too_large(self):
        """An image larger than the flavor's disk returns 400."""
        # Get the fake image service so we can update the size of the image
        (image_service, image_id) = glance.get_remote_image_service(
            context, self.image_uuid)
        image = image_service.show(context, image_id)
        orig_size = image['size']
        new_size = str(1000 * (1024 ** 3))
        image_service.update(context, self.image_uuid, {'size': new_size})
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'size': orig_size})
        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dump_as_bytes(self.body)
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                "Flavor's disk is too small for requested image."):
            self.controller.create(self.req, body=self.body)
    @mock.patch.object(nova_fixtures.GlanceFixture, 'show',
                       return_value=dict(
                           id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                           status='active',
                           properties=dict(
                               cinder_encryption_key_id=fakes.FAKE_UUID)))
    def test_create_server_image_nonbootable(self, mock_show):
        """Images uploaded from encrypted volumes cannot be booted directly."""
        self.req.body = jsonutils.dump_as_bytes(self.body)
        expected_msg = ("Image {} is unacceptable: Direct booting of an image "
                        "uploaded from an encrypted volume is unsupported.")
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                expected_msg.format(self.image_uuid)):
            self.controller.create(self.req, body=self.body)
    def test_create_instance_with_image_non_uuid(self):
        """A non-UUID imageRef string fails schema validation."""
        self.body['server']['imageRef'] = 'not-uuid'
        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_with_image_as_full_url(self):
        """Full image URLs are no longer accepted as imageRef."""
        image_href = ('http://localhost/v2/%s/images/'
                      '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
        self.body['server']['imageRef'] = image_href
        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_with_image_as_empty_string(self):
        """An empty imageRef without bdms returns 400 (not ValidationError)."""
        self.body['server']['imageRef'] = ''
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_no_key_pair(self):
        """Creation succeeds even when the tenant has no keypairs."""
        fakes.stub_out_key_pair_funcs(self, have_key_pair=False)
        self._test_create_instance()
    def _test_create_extra(self, params, no_image=False):
        """Merge *params* into the 'server' body and issue the create.

        :param params: dict merged into self.body['server']
        :param no_image: if True, drop imageRef (boot-from-volume style)
        """
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.headers["content-type"] = "application/json"
        # Indexing ['server'] doubles as an implicit sanity check on the
        # response shape; the value itself is unused.
        self.controller.create(self.req, body=self.body).obj['server']
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.PortRequiresFixedIP(
                           port_id=uuids.port))
    def test_create_instance_with_port_with_no_fixed_ips(self, mock_create):
        """PortRequiresFixedIP from compute maps to HTTP 400."""
        requested_networks = [{'port': uuids.port}]
        params = {'networks': requested_networks}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    def test_create_instance_raise_user_data_too_large(self):
        """user_data beyond the schema maximum fails validation."""
        self.body['server']['user_data'] = (b'1' * 65536)
        ex = self.assertRaises(exception.ValidationError,
                               self.controller.create,
                               self.req, body=self.body)
        # Make sure the failure was about user_data and not something else.
        self.assertIn('user_data', str(ex))
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.NetworkRequiresSubnet(
                           network_uuid=uuids.network))
    def test_create_instance_with_network_with_no_subnet(self, mock_create):
        """NetworkRequiresSubnet from compute maps to HTTP 400."""
        requested_networks = [{'uuid': uuids.network}]
        params = {'networks': requested_networks}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.NoUniqueMatch(
                           "No Unique match found for ..."))
    def test_create_instance_with_non_unique_secgroup_name(self, mock_create):
        """Ambiguous security-group names map to HTTP 409."""
        requested_networks = [{'uuid': uuids.network}]
        params = {'networks': requested_networks,
                  'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}
        self.assertRaises(webob.exc.HTTPConflict,
                          self._test_create_extra, params)
    def test_create_instance_secgroup_leading_trailing_spaces(self):
        """Security-group names with surrounding whitespace fail validation."""
        network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network}]
        params = {'networks': requested_networks,
                  'security_groups': [{'name': ' sg '}]}
        self.assertRaises(exception.ValidationError,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_secgroup_leading_trailing_spaces_compat_mode(
            self, mock_create):
        """Legacy v2 mode passes padded secgroup names through unchanged."""
        requested_networks = [{'uuid': uuids.network}]
        params = {'networks': requested_networks,
                  'security_groups': [{'name': ' sg '}]}
        def fake_create(*args, **kwargs):
            # The un-stripped name must reach the compute API as-is.
            self.assertEqual([' sg '], kwargs['security_groups'])
            return (objects.InstanceList(objects=[fakes.stub_instance_obj(
                self.req.environ['nova.context'])]), None)
        mock_create.side_effect = fake_create
        self.req.set_legacy_v2()
        self._test_create_extra(params)
    def test_create_instance_with_networks_disabled_neutronv2(self):
        """Requested networks are forwarded as (uuid, None, None, None)."""
        net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        requested_networks = [{'uuid': net_uuid}]
        params = {'networks': requested_networks}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
                       None, None)]
            self.assertEqual(result, kwargs['requested_networks'].as_tuples())
            return old_create(*args, **kwargs)
        with mock.patch('nova.compute.api.API.create', create):
            self._test_create_extra(params)
def test_create_instance_with_pass_disabled(self):
# test with admin passwords disabled See lp bug 921814
self.flags(enable_instance_password=False, group='api')
self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
self.flags(enable_instance_password=False, group='api')
self.req.body = jsonutils.dump_as_bytes(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
    def test_create_instance_name_too_long(self):
        """Names longer than 255 characters fail schema validation."""
        self.body['server']['name'] = 'X' * 256
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError, self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_name_with_spaces_in_the_middle(self):
        """Interior whitespace in a name is allowed."""
        self.body['server']['name'] = 'abc def'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)
    def test_create_instance_name_with_leading_trailing_spaces(self):
        """Leading/trailing whitespace in a name fails validation."""
        self.body['server']['name'] = ' abc def '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_name_with_leading_trailing_spaces_in_compat_mode(
            self):
        """Legacy v2 compat mode tolerates surrounding whitespace."""
        self.body['server']['name'] = ' abc def '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.set_legacy_v2()
        self.controller.create(self.req, body=self.body)
    def test_create_instance_name_all_blank_spaces(self):
        """A name consisting solely of spaces fails validation."""
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        flavor_ref = 'http://localhost/%s/flavors/3' % self.project_id
        body = {
            'server': {
                'name': ' ' * 64,
                'imageRef': image_uuid,
                'flavorRef': flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        req = fakes.HTTPRequest.blank('/%s/servers' % self.project_id)
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes(body)
        req.headers["content-type"] = "application/json"
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)
    def test_create_instance_az_with_leading_trailing_spaces(self):
        """Padded availability_zone values fail schema validation."""
        self.body['server']['availability_zone'] = ' zone1 '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_az_with_leading_trailing_spaces_compat_mode(self):
        """Legacy v2 mode accepts the padded AZ if it actually exists."""
        self.body['server']['name'] = ' abc    def '
        self.body['server']['availability_zone'] = ' zone1 '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.set_legacy_v2()
        with mock.patch.object(
            availability_zones, 'get_availability_zones',
            return_value=[' zone1 '],
        ) as mock_get_azs:
            self.controller.create(self.req, body=self.body)
            mock_get_azs.assert_called_once()
    def test_create_instance_invalid_az(self):
        """Requesting an AZ that does not exist returns 400."""
        self.body['server']['availability_zone'] = 'zone1'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        with mock.patch.object(
            availability_zones, 'get_availability_zones',
            return_value=['zone2'],
        ) as mock_get_azs:
            self.assertRaises(
                webob.exc.HTTPBadRequest,
                self.controller.create,
                self.req, body=self.body)
            mock_get_azs.assert_called_once()
    @mock.patch.object(objects.AggregateList, 'get_by_host')
    @mock.patch.object(servers, 'LOG')
    def test_create_instance_az_host(self, mock_log, mock_get_host_aggs):
        """Ensure we handle az:host format for 'availability_zone'."""
        mock_get_host_aggs.return_value = objects.AggregateList(
            objects=[
                objects.Aggregate(metadata={'availability_zone': 'zone1'}),
            ],
        )
        self.body['server']['availability_zone'] = 'zone1:host1'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)
        mock_get_host_aggs.assert_called_once()
        # Matching AZ: no mismatch warning expected.
        mock_log.warning.assert_not_called()
    @mock.patch.object(objects.AggregateList, 'get_by_host')
    @mock.patch.object(servers, 'LOG')
    def test_create_instance_az_host_mismatch_without_aggs(
        self, mock_log, mock_get_host_aggs,
    ):
        """User requests an AZ but the host doesn't have one"""
        mock_get_host_aggs.return_value = objects.AggregateList(objects=[])
        self.body['server']['availability_zone'] = 'zone1:host1'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)
        mock_get_host_aggs.assert_called_once()
        # we should see a log since the host doesn't belong to the requested AZ
        self.assertIn('bug #1934770', mock_log.warning.call_args[0][0])
    @mock.patch.object(objects.AggregateList, 'get_by_host')
    @mock.patch.object(servers, 'LOG')
    def test_create_instance_az_host_mismatch_without_aggs_in_default_az(
        self, mock_log, mock_get_host_aggs,
    ):
        """User requests the default AZ and host isn't in any explicit AZ"""
        self.flags(default_availability_zone='zone1')
        mock_get_host_aggs.return_value = objects.AggregateList(objects=[])
        self.body['server']['availability_zone'] = 'zone1:host1'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)
        mock_get_host_aggs.assert_called_once()
        # we shouldn't see a log since the host is not in any aggregate and
        # therefore is in the default AZ
        mock_log.warning.assert_not_called()
    @mock.patch.object(objects.AggregateList, 'get_by_host')
    @mock.patch.object(servers, 'LOG')
    def test_create_instance_az_host_mismatch_with_aggs(
        self, mock_log, mock_get_host_aggs,
    ):
        """User requests an AZ but the host has a different one."""
        mock_get_host_aggs.return_value = objects.AggregateList(
            objects=[
                objects.Aggregate(metadata={'availability_zone': 'zone2'}),
            ],
        )
        self.body['server']['availability_zone'] = 'zone1:host1'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)
        mock_get_host_aggs.assert_called_once()
        # we should see a log since the host belongs to a different AZ
        self.assertIn('bug #1934770', mock_log.warning.call_args[0][0])
    @mock.patch.object(objects.AggregateList, 'get_by_host')
    @mock.patch.object(servers, 'LOG')
    def test_create_instance_az_host_mismatch_with_aggs_in_default_az(
        self, mock_log, mock_get_host_aggs,
    ):
        """User requests the default AZ and host is in aggregates without AZ"""
        self.flags(default_availability_zone='zone1')
        mock_get_host_aggs.return_value = objects.AggregateList(
            objects=[objects.Aggregate(metadata={})],
        )
        self.body['server']['availability_zone'] = 'zone1:host1'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body)
        mock_get_host_aggs.assert_called_once()
        # we shouldn't see a log since none of the host aggregates have an
        # explicit AZ set and the host is therefore in the default AZ
        mock_log.warning.assert_not_called()
    def test_create_instance_invalid_az_format(self):
        """Multiple colons in availability_zone return 400."""
        self.body['server']['availability_zone'] = 'invalid::::zone'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_invalid_az_as_int(self):
        """A non-string availability_zone fails schema validation."""
        self.body['server']['availability_zone'] = 123
        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)
    def test_create_instance(self):
        """Basic create: response carries the fake id and an adminPass."""
        self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self._check_admin_password_len(server)
        self.assertEqual(FAKE_UUID, server['id'])
    def test_create_instance_pass_disabled(self):
        """With passwords disabled, adminPass is absent from the response."""
        self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
        self.flags(enable_instance_password=False, group='api')
        self.req.body = jsonutils.dump_as_bytes(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self._check_admin_password_missing(server)
        self.assertEqual(FAKE_UUID, server['id'])
@mock.patch('nova.virt.hardware.numa_get_constraints')
def _test_create_instance_numa_topology_wrong(self, exc,
numa_constraints_mock):
numa_constraints_mock.side_effect = exc(**{
'name': None,
'source': 'flavor',
'requested': 'dummy',
'available': str(objects.fields.CPUAllocationPolicy.ALL),
'cpunum': 0,
'cpumax': 0,
'cpuset': None,
'memsize': 0,
'memtotal': 0})
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_numa_topology_wrong(self):
for exc in [exception.ImageNUMATopologyIncomplete,
exception.ImageNUMATopologyForbidden,
exception.ImageNUMATopologyAsymmetric,
exception.ImageNUMATopologyCPUOutOfRange,
exception.ImageNUMATopologyCPUDuplicates,
exception.ImageNUMATopologyCPUsUnassigned,
exception.InvalidCPUAllocationPolicy,
exception.InvalidCPUThreadAllocationPolicy,
exception.ImageNUMATopologyMemoryOutOfRange]:
self._test_create_instance_numa_topology_wrong(exc)
    def test_create_instance_too_much_metadata(self):
        """Exceeding the metadata quota maps to HTTP 403."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata']['vote'] = 'fiddletown'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_metadata_key_too_long(self):
        """Metadata keys longer than 255 chars fail schema validation."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata'] = {('a' * 260): '12345'}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_metadata_value_too_long(self):
        """Metadata values longer than 255 chars fail schema validation."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata'] = {'key1': ('a' * 260)}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_metadata_key_blank(self):
        """An empty metadata key fails schema validation."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata'] = {'': 'abcd'}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_metadata_not_dict(self):
        """Metadata must be an object, not a bare string."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata'] = 'string'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_metadata_key_not_string(self):
        """Non-string metadata keys fail schema validation."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata'] = {1: 'test'}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_metadata_value_not_string(self):
        """Non-string metadata values fail schema validation."""
        self.flags(metadata_items=1, group='quota')
        self.body['server']['metadata'] = {'test': ['a', 'list']}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_user_data_malformed_bad_request(self):
        """user_data that is not valid base64 fails schema validation."""
        params = {'user_data': 'u1234'}
        self.assertRaises(exception.ValidationError,
                          self._test_create_extra, params)
    def _create_instance_body_of_config_drive(self, param):
        """Set config_drive=*param* in the body and assert it reaches create.

        Wraps compute_api.API.create so the test fails if 'config_drive' is
        not forwarded as a kwarg.
        """
        def create(*args, **kwargs):
            self.assertIn('config_drive', kwargs)
            return old_create(*args, **kwargs)
        old_create = compute_api.API.create
        self.stub_out('nova.compute.api.API.create', create)
        self.body['server']['config_drive'] = param
        self.req.body = jsonutils.dump_as_bytes(self.body)
    def test_create_instance_with_config_drive(self):
        """A boolean True config_drive is accepted."""
        param = True
        self._create_instance_body_of_config_drive(param)
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_with_config_drive_as_boolean_string(self):
        """The string 'false' is accepted as a boolean-like value."""
        param = 'false'
        self._create_instance_body_of_config_drive(param)
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_with_bad_config_drive(self):
        """A numeric config_drive fails schema validation."""
        param = 12345
        self._create_instance_body_of_config_drive(param)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_without_config_drive(self):
        """Omitting config_drive forwards None to the compute API."""
        def create(*args, **kwargs):
            self.assertIsNone(kwargs['config_drive'])
            return old_create(*args, **kwargs)
        old_create = compute_api.API.create
        self.stub_out('nova.compute.api.API.create', create)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_with_empty_config_drive(self):
        """An empty-string config_drive fails schema validation."""
        param = ''
        self._create_instance_body_of_config_drive(param)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
def _test_create(self, params, no_image=False):
self. body['server'].update(params)
if no_image:
del self.body['server']['imageRef']
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body).obj['server']
def test_create_instance_with_volumes_enabled_no_image(self):
"""Test that the create will fail if there is no image
and no bdms supplied in the request
"""
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create, {}, no_image=True)
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch('nova.block_device.get_bdm_image_metadata')
def test_create_instance_with_bdms_and_no_image(
self, mock_bdm_image_metadata, mock_validate_bdm, mock_get_vols):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm_v2[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2}
self._test_create(params, no_image=True)
mock_validate_bdm.assert_called_once()
mock_bdm_image_metadata.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, mock.ANY, False)
@mock.patch('nova.compute.api.API._get_volumes_for_bdms')
@mock.patch.object(compute_api.API, '_validate_bdm')
@mock.patch('nova.block_device.get_bdm_image_metadata')
def test_create_instance_with_bdms_and_empty_imageRef(
self, mock_bdm_image_metadata, mock_validate_bdm, mock_get_volumes):
mock_bdm_image_metadata.return_value = {}
mock_validate_bdm.return_value = True
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertThat(
block_device.BlockDeviceDict(self.bdm_v2[0]),
matchers.DictMatches(kwargs['block_device_mapping'][0])
)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
params = {'block_device_mapping_v2': self.bdm_v2,
'imageRef': ''}
self._test_create(params)
def test_create_instance_with_imageRef_as_full_url(self):
bdm = [{'device_name': 'foo'}]
image_href = ('http://localhost/v2/%s/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
params = {'block_device_mapping_v2': bdm,
'imageRef': image_href}
self.assertRaises(exception.ValidationError,
self._test_create, params)
def test_create_instance_with_non_uuid_imageRef(self):
bdm = [{'device_name': 'foo'}]
params = {'block_device_mapping_v2': bdm,
'imageRef': '123123abcd'}
self.assertRaises(exception.ValidationError,
self._test_create, params)
    def test_create_instance_with_invalid_bdm_in_2nd_dict(self):
        """The whole request fails when any BDM in the list is invalid,
        even if an earlier BDM is valid.
        """
        bdm_1st = {"source_type": "image", "delete_on_termination": True,
                   "boot_index": 0,
                   "uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
                   "destination_type": "local"}
        # 'invalid' is not an allowed destination_type value.
        bdm_2nd = {"source_type": "volume",
                   "uuid": "99d92140-3d0c-4ea5-a49c-f94c38c607f0",
                   "destination_type": "invalid"}
        bdm = [bdm_1st, bdm_2nd]
        params = {'block_device_mapping_v2': bdm,
                  'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params)
    def test_create_instance_with_boot_index_none_ok(self):
        """Tests creating a server with two block devices. One is the boot
        device and the other is a non-bootable device.
        """
        # From the docs:
        # To disable a device from booting, set the boot index to a negative
        # value or use the default boot index value, which is None. The
        # simplest usage is, set the boot index of the boot device to 0 and use
        # the default boot index value, None, for any other devices.
        bdms = [
            # This is the bootable device that would create a 20GB cinder
            # volume from the given image.
            {
                'source_type': 'image',
                'destination_type': 'volume',
                'boot_index': 0,
                'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6',
                'volume_size': 20
            },
            # This is the non-bootable 10GB ext4 ephemeral block device.
            {
                'source_type': 'blank',
                'destination_type': 'local',
                'boot_index': None,
                # If 'guest_format' is 'swap' then a swap device is created.
                'guest_format': 'ext4'
            }
        ]
        params = {'block_device_mapping_v2': bdms}
        # No imageRef: the bootable volume-backed BDM supplies the image.
        self._test_create(params, no_image=True)
    def test_create_instance_with_boot_index_none_image_local_fails(self):
        """Tests creating a server with a local image-based block device which
        has a boot_index of None which is invalid.
        """
        bdms = [{
            'source_type': 'image',
            'destination_type': 'local',
            'boot_index': None,
            'uuid': '155d900f-4e14-4e4c-a73d-069cbf4541e6'
        }]
        params = {'block_device_mapping_v2': bdms}
        # An image->local BDM with boot_index=None is rejected with a 400.
        self.assertRaises(webob.exc.HTTPBadRequest, self._test_create,
                          params, no_image=True)
    def test_create_instance_with_invalid_boot_index(self):
        """boot_index must be an integer; a non-numeric string fails schema
        validation.
        """
        bdm = [{"source_type": "image", "delete_on_termination": True,
                "boot_index": 'invalid',
                "uuid": "2ff3a1d3-ed70-4c3f-94ac-941461153bc0",
                "destination_type": "local"}]
        params = {'block_device_mapping_v2': bdm,
                  'imageRef': '2ff3a1d3-ed70-4c3f-94ac-941461153bc0'}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params)
    def test_create_instance_with_device_name_not_string(self):
        """A non-string device_name in a v2 BDM fails schema validation."""
        self.bdm_v2[0]['device_name'] = 123
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params, no_image=True)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_bdm_param_not_list(self, mock_create):
        """block_device_mapping must be a list; a bare string is rejected."""
        self.params = {'block_device_mapping': '/dev/vdb'}
        self.assertRaises(exception.ValidationError,
                          self._test_create, self.params)
    def test_create_instance_with_device_name_empty(self):
        """An empty-string device_name in a v2 BDM fails schema validation."""
        self.bdm_v2[0]['device_name'] = ''
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params, no_image=True)
    def test_create_instance_with_device_name_too_long(self):
        """A device_name longer than 255 characters fails validation."""
        self.bdm_v2[0]['device_name'] = 'a' * 256
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params, no_image=True)
    def test_create_instance_with_space_in_device_name(self):
        """A device_name containing whitespace fails schema validation."""
        self.bdm_v2[0]['device_name'] = 'v da'
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertTrue(kwargs['legacy_bdm'])
            self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params, no_image=True)
    def test_create_instance_with_invalid_size(self):
        """A non-numeric volume_size in a v2 BDM fails schema validation."""
        self.bdm_v2[0]['volume_size'] = 'hello world'
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm_v2)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params, no_image=True)
    def _test_create_instance_with_destination_type_error(self,
                                                          destination_type):
        """Helper: set the given destination_type on the first v2 BDM and
        expect schema validation to reject the create request.
        """
        self.bdm_v2[0]['destination_type'] = destination_type
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(exception.ValidationError,
                          self._test_create, params, no_image=True)
    def test_create_instance_with_destination_type_empty_string(self):
        """An empty destination_type is invalid."""
        self._test_create_instance_with_destination_type_error('')
    def test_create_instance_with_invalid_destination_type(self):
        """A destination_type outside the allowed enum is invalid."""
        self._test_create_instance_with_destination_type_error('fake')
    @mock.patch('nova.compute.api.API._get_volumes_for_bdms')
    @mock.patch.object(compute_api.API, '_validate_bdm')
    def test_create_instance_bdm(self, mock_validate_bdm, mock_get_volumes):
        """A v2 BDM 'uuid' field is translated to 'volume_id' and passed to
        compute API create in non-legacy form.
        """
        bdm = [{
            'source_type': 'volume',
            'device_name': 'fake_dev',
            'uuid': 'fake_vol'
        }]
        bdm_expected = [{
            'source_type': 'volume',
            'device_name': 'fake_dev',
            'volume_id': 'fake_vol'
        }]
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            # v2 BDMs must be flagged as non-legacy.
            self.assertFalse(kwargs['legacy_bdm'])
            for expected, received in zip(bdm_expected,
                                          kwargs['block_device_mapping']):
                self.assertThat(block_device.BlockDeviceDict(expected),
                                matchers.DictMatches(received))
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': bdm}
        self._test_create(params, no_image=True)
        mock_validate_bdm.assert_called_once()
    @mock.patch('nova.compute.api.API._get_volumes_for_bdms')
    @mock.patch.object(compute_api.API, '_validate_bdm')
    def test_create_instance_bdm_missing_device_name(self, mock_validate_bdm,
                                                     mock_get_volumes):
        """A v2 BDM may omit device_name; the create request still succeeds."""
        del self.bdm_v2[0]['device_name']
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertFalse(kwargs['legacy_bdm'])
            # device_name should not have been defaulted to None.
            self.assertNotIn(None,
                             kwargs['block_device_mapping'][0]['device_name'])
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        params = {'block_device_mapping_v2': self.bdm_v2}
        self._test_create(params, no_image=True)
        mock_validate_bdm.assert_called_once()
    @mock.patch.object(
        block_device.BlockDeviceDict, '_validate',
        side_effect=exception.InvalidBDMFormat(details='Wrong BDM'))
    def test_create_instance_bdm_validation_error(self, mock_validate):
        """An InvalidBDMFormat raised during BDM validation maps to a 400."""
        params = {'block_device_mapping_v2': self.bdm_v2}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create, params, no_image=True)
    @mock.patch('nova.block_device.get_bdm_image_metadata')
    def test_create_instance_non_bootable_volume_fails(self, fake_bdm_meta):
        """A non-bootable boot volume in a v2 BDM results in a 400."""
        params = {'block_device_mapping_v2': self.bdm_v2}
        fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
        self.assertRaises(webob.exc.HTTPBadRequest, self._test_create, params,
                          no_image=True)
    @mock.patch('nova.compute.api.API._get_volumes_for_bdms')
    def test_create_instance_bdm_api_validation_fails(self, mock_get_volumes):
        """Each compute-API BDM validation error maps to a 400 and the
        instance is never destroyed (it was never created).
        """
        self.validation_fail_test_validate_called = False
        self.validation_fail_instance_destroy_called = False
        bdm_exceptions = ((exception.InvalidBDMSnapshot, {'id': 'fake'}),
                          (exception.InvalidBDMVolume, {'id': 'fake'}),
                          (exception.InvalidBDMImage, {'id': 'fake'}),
                          (exception.InvalidBDMBootSequence, {}),
                          (exception.InvalidBDMLocalsLimit, {}),
                          (exception.InvalidBDMDiskBus, {'disk_bus': 'foo'}))
        ex_iter = iter(bdm_exceptions)
        def _validate_bdm(*args, **kwargs):
            self.validation_fail_test_validate_called = True
            # Raise the next exception from the sequence on each create.
            ex, kargs = next(ex_iter)
            raise ex(**kargs)
        def _instance_destroy(*args, **kwargs):
            self.validation_fail_instance_destroy_called = True
        self.stub_out('nova.compute.api.API._validate_bdm', _validate_bdm)
        self.stub_out('nova.objects.Instance.destroy', _instance_destroy)
        for _unused in range(len(bdm_exceptions)):
            params = {'block_device_mapping_v2':
                      [self.bdm_v2[0].copy()]}
            self.assertRaises(webob.exc.HTTPBadRequest,
                              self._test_create, params)
            self.assertTrue(self.validation_fail_test_validate_called)
            self.assertFalse(self.validation_fail_instance_destroy_called)
            # Reset the flags for the next iteration.
            self.validation_fail_test_validate_called = False
            self.validation_fail_instance_destroy_called = False
    @mock.patch('nova.compute.api.API._get_volumes_for_bdms')
    @mock.patch.object(compute_api.API, '_validate_bdm')
    def _test_create_bdm(self, params, mock_validate_bdm, mock_get_volumes,
                         no_image=False):
        """Helper: create a server with *params* merged into the request body
        and assert _validate_bdm was called once with the expected types.

        :param params: dict merged into self.body['server']
        :param no_image: when True, drop imageRef from the request body
        """
        self.body['server'].update(params)
        if no_image:
            del self.body['server']['imageRef']
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.controller.create(self.req, body=self.body).obj['server']
        mock_validate_bdm.assert_called_once_with(
            test.MatchType(fakes.FakeRequestContext),
            test.MatchType(objects.Instance),
            test.MatchType(objects.Flavor),
            test.MatchType(objects.BlockDeviceMappingList),
            {},
            mock_get_volumes.return_value,
            False)
    def test_create_instance_with_volumes_enabled(self):
        """Legacy BDMs are passed through to compute API create unchanged."""
        params = {'block_device_mapping': self.bdm}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_bdm(params)
    @mock.patch('nova.block_device.get_bdm_image_metadata')
    def test_create_instance_with_volumes_enabled_and_bdms_no_image(
        self, mock_get_bdm_image_metadata):
        """Test that the create works if there is no image supplied but
        os-volumes extension is enabled and bdms are supplied
        """
        volume = {
            'id': uuids.volume_id,
            'status': 'active',
            'volume_image_metadata':
                {'test_key': 'test_value'}
        }
        mock_get_bdm_image_metadata.return_value = volume
        params = {'block_device_mapping': self.bdm}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm)
            # No imageRef was supplied, so none should be passed down.
            self.assertNotIn('imageRef', kwargs)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_bdm(params, no_image=True)
        # legacy_bdm=True because legacy-format BDMs were supplied.
        mock_get_bdm_image_metadata.assert_called_once_with(
            mock.ANY, mock.ANY, mock.ANY, self.bdm, True)
    @mock.patch('nova.block_device.get_bdm_image_metadata')
    def test_create_instance_with_imageRef_as_empty_string(
        self, mock_bdm_image_metadata):
        """An empty imageRef is accepted when legacy BDMs supply a boot
        volume whose image metadata is used instead.
        """
        volume = {
            'id': uuids.volume_id,
            'status': 'active',
            'volume_image_metadata':
                {'test_key': 'test_value'}
        }
        mock_bdm_image_metadata.return_value = volume
        params = {'block_device_mapping': self.bdm,
                  'imageRef': ''}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_bdm(params)
def test_create_instance_with_imageRef_as_full_url_legacy_bdm(self):
bdm = [{
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda'
}]
image_href = ('http://localhost/v2/%s/images/'
'76fa36fc-c930-4bf3-8c8a-ea2a2420deb6' % self.project_id)
params = {'block_device_mapping': bdm,
'imageRef': image_href}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_non_uuid_imageRef_legacy_bdm(self):
bdm = [{
'volume_id': fakes.FAKE_UUID,
'device_name': 'vda'
}]
params = {'block_device_mapping': bdm,
'imageRef': 'bad-format'}
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
    @mock.patch('nova.block_device.get_bdm_image_metadata')
    def test_create_instance_non_bootable_volume_fails_legacy_bdm(
        self, fake_bdm_meta):
        """A non-bootable boot volume in a legacy BDM results in a 400."""
        bdm = [{
            'volume_id': fakes.FAKE_UUID,
            'device_name': 'vda'
        }]
        params = {'block_device_mapping': bdm}
        fake_bdm_meta.side_effect = exception.InvalidBDMVolumeNotBootable(id=1)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_bdm, params, no_image=True)
    def test_create_instance_with_device_name_not_string_legacy_bdm(self):
        """A non-string device_name in a legacy BDM fails validation."""
        self.bdm[0]['device_name'] = 123
        old_create = compute_api.API.create
        self.params = {'block_device_mapping': self.bdm}
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self.assertRaises(exception.ValidationError,
                          self._test_create_bdm, self.params)
    def test_create_instance_with_snapshot_volume_id_none(self):
        """Null snapshot_id/volume_id values in a legacy BDM fail schema
        validation.
        """
        old_create = compute_api.API.create
        bdm = [{
            'no_device': None,
            'snapshot_id': None,
            'volume_id': None,
            'device_name': 'vda',
            'delete_on_termination': False
        }]
        self.params = {'block_device_mapping': bdm}
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], bdm)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self.assertRaises(exception.ValidationError,
                          self._test_create_bdm, self.params)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_legacy_bdm_param_not_list(self, mock_create):
        """A legacy block_device_mapping that is not a list is rejected."""
        self.params = {'block_device_mapping': '/dev/vdb'}
        self.assertRaises(exception.ValidationError,
                          self._test_create_bdm, self.params)
    def test_create_instance_with_device_name_empty_legacy_bdm(self):
        """An empty device_name in a legacy BDM fails schema validation."""
        self.bdm[0]['device_name'] = ''
        params = {'block_device_mapping': self.bdm}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], self.bdm)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self.assertRaises(exception.ValidationError,
                          self._test_create_bdm, params)
def test_create_instance_with_device_name_too_long_legacy_bdm(self):
self.bdm[0]['device_name'] = 'a' * 256,
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
def test_create_instance_with_space_in_device_name_legacy_bdm(self):
self.bdm[0]['device_name'] = 'vd a',
params = {'block_device_mapping': self.bdm}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertTrue(kwargs['legacy_bdm'])
self.assertEqual(kwargs['block_device_mapping'], self.bdm)
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self.assertRaises(exception.ValidationError,
self._test_create_bdm, params)
    def _test_create_bdm_instance_with_size_error(self, size):
        """Helper: create with the given legacy-BDM volume_size and expect
        schema validation to reject it.
        """
        bdm = [{'delete_on_termination': True,
                'device_name': 'vda',
                'volume_size': size,
                'volume_id': '11111111-1111-1111-1111-111111111111'}]
        params = {'block_device_mapping': bdm}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['block_device_mapping'], bdm)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self.assertRaises(exception.ValidationError,
                          self._test_create_bdm, params)
    def test_create_instance_with_invalid_size_legacy_bdm(self):
        """A non-numeric volume_size is invalid."""
        self._test_create_bdm_instance_with_size_error("hello world")
    def test_create_instance_with_size_empty_string(self):
        """An empty-string volume_size is invalid."""
        self._test_create_bdm_instance_with_size_error('')
    def test_create_instance_with_size_zero(self):
        """A zero volume_size is invalid (minimum is 1)."""
        self._test_create_bdm_instance_with_size_error("0")
    def test_create_instance_with_size_greater_than_limit(self):
        """A volume_size above the DB integer limit is invalid."""
        self._test_create_bdm_instance_with_size_error(db.MAX_INT + 1)
    def test_create_instance_with_bdm_delete_on_termination(self):
        """String 'True'/'False' delete_on_termination values are coerced
        to real booleans before reaching compute API create.
        """
        bdm = [{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': 'True'},
               {'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': True},
               {'device_name': 'foo3', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': 'False'},
               {'device_name': 'foo4', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': False},
               {'device_name': 'foo5', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': False}]
        expected_bdm = [
            {'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
             'delete_on_termination': True},
            {'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
             'delete_on_termination': True},
            {'device_name': 'foo3', 'volume_id': fakes.FAKE_UUID,
             'delete_on_termination': False},
            {'device_name': 'foo4', 'volume_id': fakes.FAKE_UUID,
             'delete_on_termination': False},
            {'device_name': 'foo5', 'volume_id': fakes.FAKE_UUID,
             'delete_on_termination': False}]
        params = {'block_device_mapping': bdm}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(expected_bdm, kwargs['block_device_mapping'])
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_bdm(params)
    def test_create_instance_with_bdm_delete_on_termination_invalid_2nd(self):
        """A non-boolean delete_on_termination in any BDM fails validation."""
        bdm = [{'device_name': 'foo1', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': 'True'},
               {'device_name': 'foo2', 'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': 'invalid'}]
        params = {'block_device_mapping': bdm}
        self.assertRaises(exception.ValidationError,
                          self._test_create_bdm, params)
    def test_create_instance_decide_format_legacy(self):
        """With no BDMs or legacy-format BDMs, compute API create is invoked
        with legacy_bdm=True.
        """
        bdm = [{'device_name': 'foo1',
                'volume_id': fakes.FAKE_UUID,
                'delete_on_termination': True}]
        expected_legacy_flag = True
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            legacy_bdm = kwargs.get('legacy_bdm', True)
            self.assertEqual(legacy_bdm, expected_legacy_flag)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        # No BDMs at all: still legacy.
        self._test_create_bdm({})
        # Legacy-format BDMs: legacy flag stays True.
        params = {'block_device_mapping': bdm}
        self._test_create_bdm(params)
def test_create_instance_both_bdm_formats(self):
bdm = [{'device_name': 'foo'}]
bdm_v2 = [{'source_type': 'volume',
'uuid': 'fake_vol'}]
params = {'block_device_mapping': bdm,
'block_device_mapping_v2': bdm_v2}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_bdm, params)
    def test_create_instance_invalid_key_name(self):
        """A key_name that does not exist results in a 400."""
        self.body['server']['key_name'] = 'nonexistentkey'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_valid_key_name(self):
        """Creating a server with an existing key_name succeeds."""
        self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
        self.body['server']['key_name'] = 'key'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        self.assertEqual(FAKE_UUID, res["server"]["id"])
        self._check_admin_password_len(res["server"])
    def test_create_server_keypair_name_with_leading_trailing(self):
        """Leading/trailing whitespace in key_name fails schema validation."""
        self.body['server']['key_name'] = '  abc  '
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_server_keypair_name_with_leading_trailing_compat_mode(
            self, mock_create):
        """In legacy v2 compat mode, whitespace in key_name is preserved and
        accepted rather than rejected.
        """
        params = {'key_name': '  abc  '}
        def fake_create(*args, **kwargs):
            # The key_name must arrive unstripped.
            self.assertEqual('  abc  ', kwargs['key_name'])
            return (objects.InstanceList(objects=[fakes.stub_instance_obj(
                self.req.environ['nova.context'])]), None)
        mock_create.side_effect = fake_create
        self.req.set_legacy_v2()
        self._test_create_extra(params)
def test_create_instance_invalid_flavor_href(self):
flavor_ref = 'http://localhost/v2/flavors/asdf'
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_int(self):
flavor_ref = -1
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
    @mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
                       return_value=objects.Flavor())
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_non_existing_snapshot_id(
            self, mock_create,
            mock_get_flavor_by_flavor_id):
        """SnapshotNotFound raised by compute API create maps to a 400."""
        mock_create.side_effect = exception.SnapshotNotFound(snapshot_id='123')
        self.body['server'] = {'name': 'server_test',
                               'flavorRef': self.flavor_ref,
                               'block_device_mapping_v2':
                                   [{'source_type': 'snapshot',
                                     'uuid': '123'}]}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_empty(self):
flavor_ref = ""
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_bad_flavor_href(self):
flavor_ref = 'http://localhost/v2/flavors/17'
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dump_as_bytes(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
    def test_create_instance_local_href(self):
        """A plain (non-URL) image id in the request body works."""
        self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self.assertEqual(FAKE_UUID, server['id'])
    def test_create_instance_admin_password(self):
        """A supplied adminPass is echoed back in the create response."""
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = 'testpass'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self.assertEqual(server['adminPass'],
                         self.body['server']['adminPass'])
    def test_create_instance_admin_password_pass_disabled(self):
        """With enable_instance_password=False the create still succeeds."""
        self.flags(enable_instance_password=False, group='api')
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = 'testpass'
        self.req.body = jsonutils.dump_as_bytes(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        self.assertIn('server', res)
        # The request body itself still carries the supplied password.
        self.assertIn('adminPass', self.body['server'])
    def test_create_instance_admin_password_empty(self):
        """An empty adminPass is accepted by the create API."""
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = ''
        self.req.body = jsonutils.dump_as_bytes(self.body)
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body)
    def test_create_location(self):
        """The create response Location header points at the new server."""
        self.stub_out('uuid.uuid4', lambda: FAKE_UUID)
        selfhref = 'http://localhost/v2/%s/servers/%s' % (self.project_id,
                                                          FAKE_UUID)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        robj = self.controller.create(self.req, body=self.body)
        self.assertEqual(encodeutils.safe_decode(robj['Location']), selfhref)
    @mock.patch('nova.objects.Quotas.get_all_by_project')
    @mock.patch('nova.objects.Quotas.get_all_by_project_and_user')
    @mock.patch('nova.objects.Quotas.count_as_dict')
    def _do_test_create_instance_above_quota(self, resource, allowed,
            quota, expected_msg, mock_count, mock_get_all_pu,
            mock_get_all_p):
        """Helper: simulate *resource* usage at (quota - allowed) and assert
        a create is rejected with a 403 carrying *expected_msg*.

        :param resource: one of 'instances', 'ram', 'cores'
        :param allowed: headroom remaining under the quota
        :param quota: the configured quota limit
        :param expected_msg: expected HTTPForbidden explanation text
        """
        count = {'project': {}, 'user': {}}
        for res in ('instances', 'ram', 'cores'):
            if res == resource:
                # Usage is set so exactly `allowed` remains under `quota`.
                value = quota - allowed
                count['project'][res] = count['user'][res] = value
            else:
                count['project'][res] = count['user'][res] = 0
        mock_count.return_value = count
        mock_get_all_p.return_value = {'project_id': fakes.FAKE_PROJECT_ID}
        mock_get_all_pu.return_value = {'project_id': fakes.FAKE_PROJECT_ID,
                                        'user_id': 'fake_user'}
        if resource in db_api.PER_PROJECT_QUOTAS:
            mock_get_all_p.return_value[resource] = quota
        else:
            mock_get_all_pu.return_value[resource] = quota
        fakes.stub_out_instance_quota(self, allowed, quota, resource)
        self.body['server']['flavorRef'] = 3
        self.req.body = jsonutils.dump_as_bytes(self.body)
        try:
            self.controller.create(self.req, body=self.body).obj['server']
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)
    def test_create_instance_above_quota_instances(self):
        """Exceeding the instances quota is a 403 with a descriptive body."""
        msg = ('Quota exceeded for instances: Requested 1, but'
               ' already used 10 of 10 instances')
        self._do_test_create_instance_above_quota('instances', 0, 10, msg)
    def test_create_instance_above_quota_ram(self):
        """Exceeding the ram quota is a 403 with a descriptive body."""
        msg = ('Quota exceeded for ram: Requested 4096, but'
               ' already used 8192 of 10240 ram')
        self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
    def test_create_instance_above_quota_cores(self):
        """Exceeding the cores quota is a 403 with a descriptive body."""
        msg = ('Quota exceeded for cores: Requested 2, but'
               ' already used 9 of 10 cores')
        self._do_test_create_instance_above_quota('cores', 1, 10, msg)
    @mock.patch.object(fakes.QUOTAS, 'limit_check')
    def test_create_instance_above_quota_server_group_members(
            self, mock_limit_check):
        """Exceeding server_group_members quota rejects the create with a
        403 and a group-specific message.
        """
        ctxt = self.req.environ['nova.context']
        fake_group = objects.InstanceGroup(ctxt)
        fake_group.project_id = ctxt.project_id
        fake_group.user_id = ctxt.user_id
        fake_group.create()
        real_count = fakes.QUOTAS.count_as_dict
        def fake_count(context, name, group, user_id):
            # Report the group as already full; defer everything else to
            # the real implementation.
            if name == 'server_group_members':
                self.assertEqual(group.uuid, fake_group.uuid)
                self.assertEqual(user_id,
                                 self.req.environ['nova.context'].user_id)
                return {'user': {'server_group_members': 10}}
            else:
                return real_count(context, name, group, user_id)
        def fake_limit_check(context, **kwargs):
            if 'server_group_members' in kwargs:
                raise exception.OverQuota(overs={})
        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        mock_limit_check.side_effect = fake_limit_check
        self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {'group': fake_group.uuid}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        expected_msg = "Quota exceeded, too many servers in group"
        try:
            with mock.patch.object(fakes.QUOTAS, 'count_as_dict',
                                   side_effect=fake_count):
                self.controller.create(self.req, body=self.body).obj
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)
    def test_create_instance_with_group_hint(self):
        """A valid group scheduler hint adds the new server to the group."""
        ctxt = self.req.environ['nova.context']
        test_group = objects.InstanceGroup(ctxt)
        test_group.project_id = ctxt.project_id
        test_group.user_id = ctxt.user_id
        test_group.create()
        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {'group': test_group.uuid}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        server = self.controller.create(self.req, body=self.body).obj['server']
        # Re-fetch the group and verify membership was recorded.
        test_group = objects.InstanceGroup.get_by_uuid(ctxt, test_group.uuid)
        self.assertIn(server['id'], test_group.members)
def _test_create_instance_with_group_hint(self, hint,
hint_name='os:scheduler_hints'):
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
def fake_create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], hint)
return ([fakes.stub_instance(1)], '')
self.stub_out('nova.compute.api.API.create', fake_create)
self.stub_out('nova.db.instance_destroy', fake_instance_destroy)
self.body[hint_name] = hint
self.req.body = jsonutils.dump_as_bytes(self.body)
return self.controller.create(self.req, body=self.body).obj['server']
    def test_create_instance_with_group_hint_legacy(self):
        """Hints under the legacy OS-SCH-HNT key are honored too."""
        self._test_create_instance_with_group_hint(
            {'different_host': '9c47bf55-e9d8-42da-94ab-7f9e80cd1857'},
            hint_name='OS-SCH-HNT:scheduler_hints')
    def test_create_server_with_different_host_hint(self):
        """different_host accepts a single UUID or a list of UUIDs."""
        self._test_create_instance_with_group_hint(
            {'different_host': '9c47bf55-e9d8-42da-94ab-7f9e80cd1857'})
        self._test_create_instance_with_group_hint(
            {'different_host': ['9c47bf55-e9d8-42da-94ab-7f9e80cd1857',
                                '82412fa6-0365-43a9-95e4-d8b20e00c0de']})
    def test_create_instance_with_group_hint_group_not_found(self):
        """A group hint referencing a nonexistent group yields a 400."""
        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        self.stub_out('nova.db.api.instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {
            'group': '5b674f73-c8cf-40ef-9965-3b6fe4b304b1'}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)
    def test_create_instance_with_group_hint_wrong_uuid_format(self):
        """A group hint that is not a UUID fails schema validation."""
        self.body['os:scheduler_hints'] = {
            'group': 'non-uuid'}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_server_bad_hints_non_dict(self):
        """Scheduler hints must be a dict under either supported key."""
        sch_hints = ['os:scheduler_hints', 'OS-SCH-HNT:scheduler_hints']
        for hint in sch_hints:
            self.body[hint] = 'non-dict'
            self.req.body = jsonutils.dump_as_bytes(self.body)
            self.assertRaises(exception.ValidationError,
                              self.controller.create, self.req, body=self.body)
    def test_create_server_bad_hints_long_group(self):
        """An over-long group hint value fails schema validation."""
        self.body['os:scheduler_hints'] = {
            'group': 'a' * 256}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    def test_create_server_with_bad_different_host_hint(self):
        """Non-UUID different_host values fail validation, whether passed
        as a single value or as a list.
        """
        self.body['os:scheduler_hints'] = {
            'different_host': 'non-server-id'}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
        self.body['os:scheduler_hints'] = {
            'different_host': ['non-server-id01', 'non-server-id02']}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.PortInUse(port_id=uuids.port))
    def test_create_instance_with_port_in_use(self, mock_create):
        """PortInUse raised by compute API create maps to a 409 Conflict."""
        requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
        params = {'networks': requested_networks}
        self.assertRaises(webob.exc.HTTPConflict,
                          self._test_create_extra, params)
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_public_network_non_admin(self, mock_create):
        """ExternalNetworkAttachForbidden maps to a 403 for non-admins."""
        public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        params = {'networks': [{'uuid': public_network_uuid}]}
        self.req.body = jsonutils.dump_as_bytes(self.body)
        mock_create.side_effect = exception.ExternalNetworkAttachForbidden(
            network_uuid=public_network_uuid)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self._test_create_extra, params)
    def test_multiple_create_with_string_type_min_and_max(self):
        """String min_count/max_count values are coerced to ints before
        reaching compute API create.
        """
        min_count = '2'
        max_count = '3'
        params = {
            'min_count': min_count,
            'max_count': max_count,
        }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertIsInstance(kwargs['min_count'], int)
            self.assertIsInstance(kwargs['max_count'], int)
            self.assertEqual(kwargs['min_count'], 2)
            self.assertEqual(kwargs['max_count'], 3)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_extra(params)
    def test_create_instance_with_multiple_create_enabled(self):
        """Integer min_count/max_count are forwarded to compute API create."""
        min_count = 2
        max_count = 3
        params = {
            'min_count': min_count,
            'max_count': max_count,
        }
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            self.assertEqual(kwargs['min_count'], 2)
            self.assertEqual(kwargs['max_count'], 3)
            return old_create(*args, **kwargs)
        self.stub_out('nova.compute.api.API.create', create)
        self._test_create_extra(params)
def test_create_instance_invalid_negative_min(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_negative_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': -1,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_with_blank_min(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': '',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_with_blank_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': '',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_min_greater_than_max(self):
    """A request with min_count > max_count is rejected with a 400."""
    body = {
        'server': {
            'name': 'server_test',
            'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'flavorRef': 'http://localhost/123/flavors/3',
            'min_count': 4,
            'max_count': 2,
        }
    }
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, body=body)
def test_create_instance_invalid_alpha_min(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_instance_invalid_alpha_max(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 'abcd',
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req,
body=body)
def test_create_multiple_instances(self):
    """Test creating multiple instances but not asking for
    reservation_id
    """
    self.instance_cache_by_uuid = {}

    def _remember_instance(*args, **kwargs):
        # args[2] is the Instance object being populated; cache it so the
        # response id can be checked against what was actually created.
        inst = args[2]
        self.instance_cache_by_uuid[inst.uuid] = inst
        return inst

    self.stub_out('nova.compute.api.API._populate_instance_for_create',
                  _remember_instance)
    body = {
        'server': {
            'name': 'server_test',
            'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'flavorRef': 'http://localhost/123/flavors/3',
            'min_count': 2,
            'metadata': {'hello': 'world', 'open': 'stack'},
        }
    }
    res = self.controller.create(self.req, body=body).obj
    self.assertIn(res["server"]["id"], self.instance_cache_by_uuid.keys())
    self._check_admin_password_len(res["server"])
def test_create_multiple_instances_pass_disabled(self):
"""Test creating multiple instances but not asking for
reservation_id
"""
self.flags(enable_instance_password=False, group='api')
self.instance_cache_by_uuid = {}
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
def _populate_instance_for_create(*args, **kwargs):
instance = args[2]
self.instance_cache_by_uuid[instance.uuid] = instance
return instance
self.stub_out('nova.compute.api.API._populate_instance_for_create',
_populate_instance_for_create)
res = self.controller.create(self.req, body=body).obj
instance_uuids = self.instance_cache_by_uuid.keys()
self.assertIn(res["server"]["id"], instance_uuids)
self._check_admin_password_missing(res["server"])
def _create_multiple_instances_resv_id_return(self, resv_id_return):
    """Test creating multiple instances with asking for
    reservation_id
    """
    self.instance_cache_by_uuid = {}

    def _remember_instance(*args, **kwargs):
        # args[2] is the Instance object being populated.
        inst = args[2]
        self.instance_cache_by_uuid[inst.uuid] = inst
        return inst

    self.stub_out('nova.compute.api.API._populate_instance_for_create',
                  _remember_instance)
    body = {
        'server': {
            'name': 'server_test',
            'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'flavorRef': 'http://localhost/123/flavors/3',
            'min_count': 2,
            'metadata': {'hello': 'world',
                         'open': 'stack'},
            'return_reservation_id': resv_id_return
        }
    }
    res = self.controller.create(self.req, body=body)
    resv_id = res.obj['reservation_id']
    # A usable reservation id must be present and non-trivial.
    self.assertIsNotNone(resv_id)
    self.assertNotEqual(resv_id, "")
    self.assertGreater(len(resv_id), 1)
def test_create_multiple_instances_with_resv_id_return(self):
self._create_multiple_instances_resv_id_return(True)
def test_create_multiple_instances_with_string_resv_id_return(self):
self._create_multiple_instances_resv_id_return("True")
def test_create_multiple_instances_with_multiple_volume_bdm(self):
    """Test that a BadRequest is raised if multiple instances
    are requested with a list of block device mappings for volumes.
    """
    bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'},
           {'source_type': 'volume', 'uuid': 'vol-yyyy'}]
    params = {'min_count': 2, 'block_device_mapping_v2': bdm}
    old_create = compute_api.API.create

    def _checked_create(*args, **kwargs):
        self.assertEqual(2, kwargs['min_count'])
        self.assertEqual(2, len(kwargs['block_device_mapping']))
        return old_create(*args, **kwargs)

    self.stub_out('nova.compute.api.API.create', _checked_create)
    exc = self.assertRaises(webob.exc.HTTPBadRequest,
                            self._test_create_extra, params, no_image=True)
    self.assertEqual("Cannot attach one or more volumes to multiple "
                     "instances", exc.explanation)
def test_create_multiple_instances_with_single_volume_bdm(self):
"""Test that a BadRequest is raised if multiple instances
are requested to boot from a single volume.
"""
min_count = 2
bdm = [{'source_type': 'volume', 'uuid': 'vol-xxxx'}]
params = {
'block_device_mapping_v2': bdm,
'min_count': min_count
}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['min_count'], 2)
self.assertEqual(kwargs['block_device_mapping'][0]['volume_id'],
'vol-xxxx')
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params, no_image=True)
self.assertEqual("Cannot attach one or more volumes to multiple "
"instances", exc.explanation)
def test_create_multiple_instance_with_non_integer_max_count(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'max_count': 2.5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=body)
def test_create_multiple_instance_with_non_integer_min_count(self):
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 2.5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
'metadata': {'hello': 'world',
'open': 'stack'},
}
}
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=body)
def test_create_multiple_instance_max_count_overquota_min_count_ok(self):
    """max_count above quota is tolerated as long as min_count fits."""
    self.flags(instances=3, group='quota')
    self.instance_cache_by_uuid = {}

    def _remember_instance(*args, **kwargs):
        # args[2] is the Instance object being populated.
        inst = args[2]
        self.instance_cache_by_uuid[inst.uuid] = inst
        return inst

    self.stub_out('nova.compute.api.API._populate_instance_for_create',
                  _remember_instance)
    body = {
        'server': {
            'name': 'server_test',
            'imageRef': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
            'flavorRef': 'http://localhost/123/flavors/3',
            'min_count': 2,
            'max_count': 5,
        }
    }
    res = self.controller.create(self.req, body=body).obj
    self.assertIn(res["server"]["id"], self.instance_cache_by_uuid.keys())
def test_create_multiple_instance_max_count_overquota_min_count_over(self):
self.flags(instances=3, group='quota')
image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
body = {
'server': {
'min_count': 4,
'max_count': 5,
'name': 'server_test',
'imageRef': image_href,
'flavorRef': flavor_ref,
}
}
self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
self.req, body=body)
@mock.patch.object(compute_api.API, 'create')
def test_create_multiple_instance_with_specified_ip_neutronv2(self,
_api_mock):
_api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
reason="")
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
address = '10.0.0.1'
requested_networks = [{'uuid': network, 'fixed_ip': address,
'port': port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.MultiplePortsNotApplicable(
reason="Unable to launch multiple instances with "
"a single configured port ID. Please "
"launch your instance one by one with "
"different ports."))
def test_create_multiple_instance_with_port(self, mock_create):
requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.NetworkNotFound(
network_id=uuids.network))
def test_create_instance_with_not_found_network(self, mock_create):
requested_networks = [{'uuid': uuids.network}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PortNotFound(port_id=uuids.port))
def test_create_instance_with_port_not_found(self, mock_create):
requested_networks = [{'uuid': uuids.network, 'port': uuids.port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_network_ambiguous(self, mock_create):
mock_create.side_effect = exception.NetworkAmbiguous()
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.UnableToAutoAllocateNetwork(
project_id=FAKE_UUID))
def test_create_instance_with_unable_to_auto_allocate_network(self,
mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.ImageNotAuthorized(
image_id=FAKE_UUID))
def test_create_instance_with_image_not_authorized(self,
mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InstanceExists(
name='instance-name'))
def test_create_instance_raise_instance_exists(self, mock_create):
self.assertRaises(webob.exc.HTTPConflict,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDMEphemeralSize)
def test_create_instance_raise_invalid_bdm_ephsize(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidNUMANodesNumber(
nodes='-1'))
def test_create_instance_raise_invalid_numa_nodes(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDMFormat(details=''))
def test_create_instance_raise_invalid_bdm_format(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDMSwapSize)
def test_create_instance_raise_invalid_bdm_swapsize(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InvalidBDM)
def test_create_instance_raise_invalid_bdm(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.ImageBadRequest(
image_id='dummy', response='dummy'))
def test_create_instance_raise_image_bad_request(self, mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.FixedIpNotFoundForAddress(
address='dummy'))
def test_create_instance_raise_fixed_ip_not_found_bad_request(self,
mock_create):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.CPUThreadPolicyConfigurationInvalid())
def test_create_instance_raise_cpu_thread_policy_configuration_invalid(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.get_mem_encryption_constraint',
side_effect=exception.FlavorImageConflict(
message="fake conflict reason"))
def test_create_instance_raise_flavor_image_conflict(
self, mock_conflict):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.get_mem_encryption_constraint',
side_effect=exception.InvalidMachineType(
message="fake conflict reason"))
def test_create_instance_raise_invalid_machine_type(
self, mock_conflict):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.ImageCPUPinningForbidden())
def test_create_instance_raise_image_cpu_pinning_forbidden(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.ImageCPUThreadPolicyForbidden())
def test_create_instance_raise_image_cpu_thread_policy_forbidden(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.MemoryPageSizeInvalid(pagesize='-1'))
def test_create_instance_raise_memory_page_size_invalid(self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.MemoryPageSizeForbidden(pagesize='1',
against='2'))
def test_create_instance_raise_memory_page_size_forbidden(self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.RealtimeConfigurationInvalid())
def test_create_instance_raise_realtime_configuration_invalid(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch('nova.virt.hardware.numa_get_constraints',
side_effect=exception.RealtimeMaskNotFoundOrInvalid())
def test_create_instance_raise_realtime_mask_not_found_or_invalid(
self, mock_numa):
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_invalid_personality(self, mock_create):
    """A UnicodeDecodeError decoding a personality file maps to a 400."""
    # Personality files have been deprecated as of v2.57
    self.req.api_version_request = \
        api_version_request.APIVersionRequest('2.56')
    bad_contents = 'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA=='
    mock_create.side_effect = UnicodeDecodeError(
        'utf8', encodeutils.safe_encode(bad_contents), 19, 20,
        'invalid start byte')
    self.body['server']['personality'] = [
        {
            "path": "/etc/banner.txt",
            "contents": bad_contents,
        },
    ]
    self.req.body = jsonutils.dump_as_bytes(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, body=self.body)
def test_create_instance_without_personality_should_get_empty_list(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual([], kwargs['injected_files'])
return old_create(*args, **kwargs)
self.stub_out('nova.compute.api.API.create', create)
self._test_create_instance()
def test_create_instance_with_extra_personality_arg(self):
# Personality files have been deprecated as of v2.57
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.56')
self.body['server']['personality'] = [
{
"path": "/etc/banner.txt",
"contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
"extra_arg": "extra value"
},
]
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PciRequestAliasNotDefined(
alias='fake_name'))
def test_create_instance_pci_alias_not_defined(self, mock_create):
# Tests that PciRequestAliasNotDefined is translated to a 400 error.
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
self.assertIn('PCI alias fake_name is not defined', str(ex))
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.PciInvalidAlias(
reason='just because'))
def test_create_instance_pci_invalid_alias(self, mock_create):
# Tests that PciInvalidAlias is translated to a 400 error.
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {})
self.assertIn('Invalid PCI alias definition', str(ex))
def test_create_instance_with_user_data(self):
value = base64.encode_as_text("A random string")
params = {'user_data': value}
self._test_create_extra(params)
def test_create_instance_with_bad_user_data(self):
value = "A random string"
params = {'user_data': value}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
@mock.patch('nova.compute.api.API.create')
def test_create_instance_with_none_allowd_for_v20_compat_mode(self,
                                                              mock_create):
    """Legacy v2.0 compatibility mode accepts user_data of None."""
    def _fake_create(context, *args, **kwargs):
        self.assertIsNone(kwargs['user_data'])
        return ([fakes.stub_instance_obj(context)], None)

    mock_create.side_effect = _fake_create
    self.req.set_legacy_v2()
    self._test_create_extra({'user_data': None})
class ServersControllerCreateTestV219(ServersControllerCreateTest):
    """Server create tests for microversion 2.19, which added the
    optional 'description' field to the create request.
    """
    def _create_instance_req(self, set_desc, desc=None):
        # Optionally inject a description (possibly None) into the body,
        # then stamp the request with microversion 2.19.
        if set_desc:
            self.body['server']['description'] = desc
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.19')
    def test_create_instance_with_description(self):
        self._create_instance_req(True, 'server_desc')
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_with_none_description(self):
        self._create_instance_req(True)
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_with_empty_description(self):
        self._create_instance_req(True, '')
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_without_description(self):
        self._create_instance_req(False)
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body).obj
    def test_create_instance_description_too_long(self):
        # Descriptions are limited to 255 characters by the schema.
        self._create_instance_req(True, 'X' * 256)
        self.assertRaises(exception.ValidationError, self.controller.create,
                          self.req, body=self.body)
    def test_create_instance_description_invalid(self):
        # A NUL byte in the description must fail schema validation.
        self._create_instance_req(True, "abc\0ddef")
        self.assertRaises(exception.ValidationError, self.controller.create,
                          self.req, body=self.body)
class ServersControllerCreateTestV232(test.NoDBTestCase):
def setUp(self):
super(ServersControllerCreateTestV232, self).setUp()
self.controller = servers.ServersController()
self.body = {
'server': {
'name': 'device-tagging-server',
'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
'flavorRef': '2',
'networks': [{
'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
}],
'block_device_mapping_v2': [{
'uuid': '70a599e0-31e7-49b7-b260-868f441e862b',
'source_type': 'image',
'destination_type': 'volume',
'boot_index': 0,
'volume_size': '1'
}]
}
}
self.req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID, version='2.32')
self.req.method = 'POST'
self.req.headers['content-type'] = 'application/json'
def _create_server(self):
self.req.body = jsonutils.dump_as_bytes(self.body)
self.controller.create(self.req, body=self.body)
def test_create_server_no_tags(self):
with test.nested(
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
1)),
):
self._create_server()
def test_create_server_tagged_nic(self):
with test.nested(
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
1)),
):
self.body['server']['networks'][0]['tag'] = 'foo'
self._create_server()
def test_create_server_tagged_bdm(self):
with test.nested(
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f60012d9-5ba4-4547-ab48-f94ff7e62d4e'}],
1)),
):
self.body['server']['block_device_mapping_v2'][0]['tag'] = 'foo'
self._create_server()
class ServersControllerCreateTestV237(test.NoDBTestCase):
"""Tests server create scenarios with the v2.37 microversion.
These tests are mostly about testing the validation on the 2.37
server create request with emphasis on negative scenarios.
"""
def setUp(self):
super(ServersControllerCreateTestV237, self).setUp()
# Create the server controller.
self.controller = servers.ServersController()
# Define a basic server create request body which tests can customize.
self.body = {
'server': {
'name': 'auto-allocate-test',
'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
'flavorRef': '2',
},
}
# Create a fake request using the 2.37 microversion.
self.req = fakes.HTTPRequestV21.blank(
'/%s/servers' % fakes.FAKE_PROJECT_ID, version='2.37')
self.req.method = 'POST'
self.req.headers['content-type'] = 'application/json'
def _create_server(self, networks):
self.body['server']['networks'] = networks
self.req.body = jsonutils.dump_as_bytes(self.body)
return self.controller.create(self.req, body=self.body).obj['server']
def test_create_server_auth_pre_2_37_fails(self):
"""Negative test to make sure you can't pass 'auto' before 2.37"""
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.36')
self.assertRaises(exception.ValidationError, self._create_server,
'auto')
def test_create_server_no_requested_networks_fails(self):
"""Negative test for a server create request with no networks requested
which should fail with the v2.37 schema validation.
"""
self.assertRaises(exception.ValidationError, self._create_server, None)
def test_create_server_network_id_not_uuid_fails(self):
"""Negative test for a server create request where the requested
network id is not one of the auto/none enums.
"""
self.assertRaises(exception.ValidationError, self._create_server,
'not-auto-or-none')
def test_create_server_network_id_empty_string_fails(self):
"""Negative test for a server create request where the requested
network id is the empty string.
"""
self.assertRaises(exception.ValidationError, self._create_server, '')
@mock.patch.object(context.RequestContext, 'can')
def test_create_server_networks_none_skip_policy(self, context_can):
"""Test to ensure skip checking policy rule create:attach_network,
when networks is 'none' which means no network will be allocated.
"""
with test.nested(
mock.patch('nova.objects.service.get_minimum_version_all_cells',
return_value=14),
mock.patch.object(nova.compute.flavors, 'get_flavor_by_flavor_id',
return_value=objects.Flavor()),
mock.patch.object(
compute_api.API, 'create',
return_value=(
[{'uuid': 'f9bccadf-5ab1-4a56-9156-c00c178fe5f5'}],
1)),
):
network_policy = server_policies.SERVERS % 'create:attach_network'
self._create_server('none')
call_list = [c for c in context_can.call_args_list
if c[0][0] == network_policy]
self.assertEqual(0, len(call_list))
@mock.patch.object(objects.Flavor, 'get_by_flavor_id',
side_effect=exception.FlavorNotFound(flavor_id='2'))
def test_create_server_auto_flavornotfound(self, get_flavor):
"""Tests that requesting auto networking is OK. This test
short-circuits on a FlavorNotFound error.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(
webob.exc.HTTPBadRequest, self._create_server, 'auto')
# make sure it was a flavor not found error and not something else
self.assertIn('Flavor 2 could not be found', str(ex))
@mock.patch.object(objects.Flavor, 'get_by_flavor_id',
side_effect=exception.FlavorNotFound(flavor_id='2'))
def test_create_server_none_flavornotfound(self, get_flavor):
"""Tests that requesting none for networking is OK. This test
short-circuits on a FlavorNotFound error.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(
webob.exc.HTTPBadRequest, self._create_server, 'none')
# make sure it was a flavor not found error and not something else
self.assertIn('Flavor 2 could not be found', str(ex))
@mock.patch.object(objects.Flavor, 'get_by_flavor_id',
side_effect=exception.FlavorNotFound(flavor_id='2'))
def test_create_server_multiple_specific_nics_flavornotfound(self,
get_flavor):
"""Tests that requesting multiple specific network IDs is OK. This test
short-circuits on a FlavorNotFound error.
"""
self.useFixture(nova_fixtures.AllServicesCurrent())
ex = self.assertRaises(
webob.exc.HTTPBadRequest, self._create_server,
[{'uuid': 'e3b686a8-b91d-4a61-a3fc-1b74bb619ddb'},
{'uuid': 'e0f00941-f85f-46ec-9315-96ded58c2f14'}])
# make sure it was a flavor not found error and not something else
self.assertIn('Flavor 2 could not be found', str(ex))
def test_create_server_legacy_neutron_network_id_fails(self):
"""Tests that we no longer support the legacy br-<uuid> format for
a network id.
"""
uuid = 'br-00000000-0000-0000-0000-000000000000'
self.assertRaises(exception.ValidationError, self._create_server,
[{'uuid': uuid}])
@ddt.ddt
class ServersControllerCreateTestV252(test.NoDBTestCase):
    """Server create tests for microversion 2.52, which added the
    ability to set tags on the server at boot time.
    """
    def setUp(self):
        super(ServersControllerCreateTestV252, self).setUp()
        self.controller = servers.ServersController()
        # Minimal valid create body; individual tests add 'tags' to it.
        self.body = {
            'server': {
                'name': 'device-tagging-server',
                'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
                'flavorRef': '2',
                'networks': [{
                    'uuid': 'ff608d40-75e9-48cb-b745-77bb55b5eaf2'
                }]
            }
        }
        self.req = fakes.HTTPRequestV21.blank(
            '/%s/servers' % fakes.FAKE_PROJECT_ID, version='2.52')
        self.req.method = 'POST'
        self.req.headers['content-type'] = 'application/json'
    def _create_server(self, tags):
        # Add the given tags to the request body and POST it.
        self.body['server']['tags'] = tags
        self.req.body = jsonutils.dump_as_bytes(self.body)
        return self.controller.create(self.req, body=self.body).obj['server']
    def test_create_server_with_tags_pre_2_52_fails(self):
        """Negative test to make sure you can't pass 'tags' before 2.52"""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.51')
        self.assertRaises(
            exception.ValidationError, self._create_server, ['tag1'])
    # Invalid inputs: forbidden characters, over-long tag, too many tags,
    # empty tag, non-string tags, and a non-list 'tags' value.
    @ddt.data([','],
              ['/'],
              ['a' * (tag.MAX_TAG_LENGTH + 1)],
              ['a'] * (instance_obj.MAX_TAG_COUNT + 1),
              [''],
              [1, 2, 3],
              {'tag': 'tag'})
    def test_create_server_with_tags_incorrect_tags(self, tags):
        """Negative test that incorrect tags are not allowed"""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.52')
        self.assertRaises(
            exception.ValidationError, self._create_server, tags)
class ServersControllerCreateTestV257(test.NoDBTestCase):
    """Tests that trying to create a server with personality files using
    microversion 2.57 fails.
    """
    def test_create_server_with_personality_fails(self):
        controller = servers.ServersController()
        body = {
            'server': {
                'name': 'no-personality-files',
                'imageRef': '6b0edabb-8cde-4684-a3f4-978960a51378',
                'flavorRef': '2',
                'networks': 'auto',
                'personality': [{
                    'path': '/path/to/file',
                    'contents': 'ZWNobyAiaGVsbG8gd29ybGQi'
                }]
            }
        }
        req = fakes.HTTPRequestV21.blank('/servers', version='2.57')
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        req.body = jsonutils.dump_as_bytes(body)
        # 'personality' was removed from the schema in 2.57, so validation
        # must reject it and name the offending attribute.
        err = self.assertRaises(
            exception.ValidationError, controller.create, req, body=body)
        self.assertIn('personality', str(err))
# Bypass the quota check entirely so these tests exercise only the
# multiattach-volume validation paths.
@mock.patch('nova.compute.utils.check_num_instances_quota',
            new=lambda *args, **kwargs: 1)
class ServersControllerCreateTestV260(test.NoDBTestCase):
    """Negative tests for creating a server with a multiattach volume."""
    def setUp(self):
        super(ServersControllerCreateTestV260, self).setUp()
        self.useFixture(nova_fixtures.NoopQuotaDriverFixture())
        self.controller = servers.ServersController()
        # Stub out flavor lookup so no DB access is needed.
        get_flavor_mock = mock.patch(
            'nova.compute.flavors.get_flavor_by_flavor_id',
            return_value=fake_flavor.fake_flavor_obj(
                context.get_admin_context(), flavorid='1',
                expected_attrs=['extra_specs']))
        get_flavor_mock.start()
        self.addCleanup(get_flavor_mock.stop)
        reqspec_create_mock = mock.patch(
            'nova.objects.RequestSpec.create')
        reqspec_create_mock.start()
        self.addCleanup(reqspec_create_mock.stop)
        # Cinder always reports the boot volume as multiattach-capable,
        # which is the condition these tests probe.
        volume_get_mock = mock.patch(
            'nova.volume.cinder.API.get',
            return_value={'id': uuids.fake_volume_id, 'multiattach': True})
        volume_get_mock.start()
        self.addCleanup(volume_get_mock.stop)
    def _post_server(self, version=None):
        # Boot-from-volume request; defaults to microversion 2.60 which is
        # the first version to support multiattach volumes.
        body = {
            'server': {
                'name': 'multiattach',
                'flavorRef': '1',
                'networks': 'none',
                'block_device_mapping_v2': [{
                    'uuid': uuids.fake_volume_id,
                    'source_type': 'volume',
                    'destination_type': 'volume',
                    'boot_index': 0,
                    'delete_on_termination': True}]
            }
        }
        req = fakes.HTTPRequestV21.blank(
            '/servers', version=version or '2.60')
        req.body = jsonutils.dump_as_bytes(body)
        req.method = 'POST'
        req.headers['content-type'] = 'application/json'
        return self.controller.create(req, body=body)
    def test_create_server_with_multiattach_fails_old_microversion(self):
        """Tests the case that the user tries to boot from volume with a
        multiattach volume but before using microversion 2.60.
        """
        self.useFixture(nova_fixtures.AllServicesCurrent())
        ex = self.assertRaises(webob.exc.HTTPBadRequest,
                               self._post_server, '2.59')
        self.assertIn('Multiattach volumes are only supported starting with '
                      'compute API version 2.60', str(ex))
class ServersControllerCreateTestV263(ServersControllerCreateTest):
def _create_instance_req(self, certs=None):
    """Prepare a v2.63 create request carrying *certs* (may be None)."""
    # Certificate validation is only exercised when glance signature
    # verification is enabled.
    self.flags(verify_glance_signatures=True, group='glance')
    self.flags(enable_certificate_validation=True, group='glance')
    self.body['server']['trusted_image_certificates'] = certs
    self.req.body = jsonutils.dump_as_bytes(self.body)
    self.req.api_version_request = (
        api_version_request.APIVersionRequest('2.63'))
def test_create_instance_with_trusted_certs(self):
"""Test create with valid trusted_image_certificates argument"""
self._create_instance_req(
['0b5d2c72-12cc-4ba6-a8d7-3ff5cc1d8cb8',
'674736e3-f25c-405c-8362-bbf991e0ce0a'])
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_without_trusted_certs(self):
"""Test create without trusted image certificates"""
self._create_instance_req()
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body).obj
def test_create_instance_with_empty_trusted_cert_id(self):
"""Make sure we can't create with an empty certificate ID"""
self._create_instance_req([''])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is too short', str(ex))
def test_create_instance_with_empty_trusted_certs(self):
"""Make sure we can't create with an empty array of IDs"""
self.body['server']['trusted_image_certificates'] = []
self.req.body = jsonutils.dump_as_bytes(self.body)
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.63')
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is too short', str(ex))
def test_create_instance_with_too_many_trusted_certs(self):
"""Make sure we can't create with an array of >50 unique IDs"""
self._create_instance_req(['cert{}'.format(i) for i in range(51)])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is too long', str(ex))
def test_create_instance_with_nonunique_trusted_certs(self):
"""Make sure we can't create with a non-unique array of IDs"""
self._create_instance_req(['cert', 'cert'])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('has non-unique elements', str(ex))
def test_create_instance_with_invalid_trusted_cert_id(self):
"""Make sure we can't create with non-string certificate IDs"""
self._create_instance_req([1, 2])
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is not of type', str(ex))
def test_create_instance_with_invalid_trusted_certs(self):
"""Make sure we can't create with certificates in a non-array"""
self._create_instance_req("not-an-array")
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('is not of type', str(ex))
def test_create_server_with_trusted_certs_pre_2_63_fails(self):
"""Make sure we can't use trusted_certs before 2.63"""
self._create_instance_req(['trusted-cert-id'])
self.req.api_version_request = \
api_version_request.APIVersionRequest('2.62')
ex = self.assertRaises(
exception.ValidationError, self.controller.create, self.req,
body=self.body)
self.assertIn('Additional properties are not allowed', str(ex))
@mock.patch.object(compute_api.API, 'create')
def test_create_server_with_cert_validation_error(
self, mock_create):
mock_create.side_effect = exception.CertificateValidationFailed(
cert_uuid="cert id", reason="test cert validation error")
self._create_instance_req(['trusted-cert-id'])
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req,
body=self.body)
self.assertIn('test cert validation error', str(ex))
class ServersControllerCreateTestV267(ServersControllerCreateTest):
    """Tests for block_device_mapping_v2 volume_type support (v2.67)."""

    def setUp(self):
        super(ServersControllerCreateTestV267, self).setUp()
        # One boot-from-image BDM requesting a typed volume.
        self.block_device_mapping_v2 = [{
            'uuid': '70a599e0-31e7-49b7-b260-868f441e862b',
            'source_type': 'image',
            'destination_type': 'volume',
            'boot_index': 0,
            'volume_size': '1',
            'volume_type': 'fake-lvm-1'
        }]

    def _test_create_extra(self, *args, **kwargs):
        """Pin the request to 2.67 before delegating to the base class."""
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.67')
        return super(ServersControllerCreateTestV267, self)._test_create_extra(
            *args, **kwargs)

    def test_create_server_with_trusted_volume_type_pre_2_67_fails(self):
        """volume_type in a BDM is not allowed before 2.67."""
        self.body['server'].update(
            {'block_device_mapping_v2': self.block_device_mapping_v2})
        # NOTE(review): the original serializes the BDM list (not self.body)
        # into req.body; create() below is driven by the body kwarg, so this
        # is preserved as-is — confirm whether self.body was intended.
        self.req.body = jsonutils.dump_as_bytes(self.block_device_mapping_v2)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest('2.66')
        err = self.assertRaises(
            exception.ValidationError, self.controller.create, self.req,
            body=self.body)
        self.assertIn("'volume_type' was unexpected", str(err))

    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.VolumeTypeNotFound(
                           id_or_name='fake-lvm-1'))
    def test_create_instance_with_volume_type_not_found(self, mock_create):
        """A volume type that does not exist results in a 400 error."""
        err = self.assertRaises(
            webob.exc.HTTPBadRequest, self._test_create_extra,
            {'block_device_mapping_v2': self.block_device_mapping_v2})
        self.assertIn('Volume type fake-lvm-1 could not be found', str(err))

    def test_create_instance_with_volume_type_empty_string(self):
        """volume_type='' is accepted but not used."""
        self.block_device_mapping_v2[0]['volume_type'] = ''
        self._test_create_extra(
            {'block_device_mapping_v2': self.block_device_mapping_v2})

    def test_create_instance_with_none_volume_type(self):
        """volume_type=None is accepted but not used."""
        self.block_device_mapping_v2[0]['volume_type'] = None
        self._test_create_extra(
            {'block_device_mapping_v2': self.block_device_mapping_v2})

    def test_create_instance_without_volume_type(self):
        """Omitting volume_type entirely is accepted."""
        self.block_device_mapping_v2[0].pop('volume_type')
        self._test_create_extra(
            {'block_device_mapping_v2': self.block_device_mapping_v2})

    def test_create_instance_with_volume_type_too_long(self):
        """volume_type beyond 255 characters fails maxLength validation."""
        self.block_device_mapping_v2[0]['volume_type'] = 'X' * 256
        err = self.assertRaises(
            exception.ValidationError, self._test_create_extra,
            {'block_device_mapping_v2': self.block_device_mapping_v2})
        self.assertIn('is too long', str(err))
class ServersControllerCreateTestV274(ServersControllerCreateTest):
    """Tests for host/hypervisor_hostname targeting on create (v2.74)."""

    def setUp(self):
        super(ServersControllerCreateTestV274, self).setUp()
        # Targeting a host on create is admin-only, so install an admin
        # context on the request.
        self.req.environ['nova.context'] = fakes.FakeRequestContext(
            user_id='fake_user',
            project_id=self.project_id,
            is_admin=True)
        self.mock_get = self.useFixture(
            fixtures.MockPatch('nova.scheduler.client.report.'
                               'SchedulerReportClient.get')).mock

    def _generate_req(self, host=None, node=None, az=None,
                      api_version='2.74'):
        """Stage self.req/self.body with the requested scheduling hints."""
        server = self.body['server']
        if host:
            server['host'] = host
        if node:
            server['hypervisor_hostname'] = node
        if az:
            server['availability_zone'] = az
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.api_version_request = \
            api_version_request.APIVersionRequest(api_version)

    def test_create_instance_with_invalid_host(self):
        """An unknown target host yields a 400."""
        self._generate_req(host='node-invalid')
        err = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn('Compute host node-invalid could not be found.',
                      str(err))

    def test_create_instance_with_non_string_host(self):
        """A non-string host fails schema validation."""
        self._generate_req(host=123)
        err = self.assertRaises(exception.ValidationError,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn("Invalid input for field/attribute host.", str(err))

    def test_create_instance_with_invalid_hypervisor_hostname(self):
        """An unknown target node yields a 400."""
        # Placement reports the node as unknown.
        self.mock_get.return_value = mock.Mock(status_code=404)
        self._generate_req(node='node-invalid')
        err = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn('Compute host node-invalid could not be found.',
                      str(err))

    def test_create_instance_with_non_string_hypervisor_hostname(self):
        """A non-string hypervisor_hostname fails schema validation."""
        self.mock_get.return_value = mock.Mock(status_code=404)
        self._generate_req(node=123)
        err = self.assertRaises(exception.ValidationError,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn("Invalid input for field/attribute hypervisor_hostname.",
                      str(err))

    def test_create_instance_with_invalid_host_and_hypervisor_hostname(self):
        """Unknown host + node together still yields a 400 on the host."""
        self._generate_req(host='host-invalid', node='node-invalid')
        err = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn('Compute host host-invalid could not be found.',
                      str(err))

    def test_create_instance_with_non_string_host_and_hypervisor_hostname(
            self):
        """Non-string host and node both fail schema validation."""
        self._generate_req(host=123, node=123)
        err = self.assertRaises(exception.ValidationError,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn("Invalid input for field/attribute", str(err))

    def test_create_instance_pre_274(self):
        """host/hypervisor_hostname are not allowed before 2.74."""
        self._generate_req(host='host', node='node', api_version='2.73')
        err = self.assertRaises(exception.ValidationError,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn("Invalid input for field/attribute server.", str(err))

    def test_create_instance_mutual(self):
        """host/node and a zone-embedded host:node are mutually exclusive."""
        self._generate_req(host='host', node='node', az='nova:host:node')
        err = self.assertRaises(webob.exc.HTTPBadRequest,
                                self.controller.create,
                                self.req, body=self.body)
        self.assertIn("mutually exclusive", str(err))

    def test_create_instance_private_flavor(self):
        # The admin context installed in setUp makes the inherited private
        # flavor scenario inapplicable here, so this override is a no-op.
        pass
class ServersControllerCreateTestWithMock(test.TestCase):
    """Create-server tests that mock out compute_api.API.create."""

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'

    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTestWithMock, self).setUp()
        self.flags(enable_instance_password=True, group='api')
        self.controller = servers.ServersController()
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {'hello': 'world', 'open': 'stack'},
            },
        }
        self.req = fakes.HTTPRequest.blank(
            '/%s/servers' % fakes.FAKE_PROJECT_ID)
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def _test_create_extra(self, params, no_image=False):
        """Merge *params* into the server body and POST the create."""
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dump_as_bytes(self.body)
        self.req.headers["content-type"] = "application/json"
        self.controller.create(self.req, body=self.body).obj['server']

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_fixed_ip_already_in_use(self, create_mock):
        """FixedIpAlreadyInUse from compute is translated to a 400."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        address = '10.0.2.3'
        create_mock.side_effect = exception.FixedIpAlreadyInUse(
            address=address,
            instance_uuid=network)
        params = {'networks': [{'uuid': network, 'fixed_ip': address}]}
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
        self.assertEqual(1, len(create_mock.call_args_list))

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_invalid_fixed_ip(self, create_mock):
        """A malformed fixed IP fails validation before compute is called."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        params = {'networks': [{'uuid': network, 'fixed_ip': '999.0.2.3'}]}
        self.assertRaises(exception.ValidationError,
                          self._test_create_extra, params)
        self.assertFalse(create_mock.called)

    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidVolume(reason='error'))
    def test_create_instance_with_invalid_volume_error(self, create_mock):
        """InvalidVolume from compute is translated to a 400 error."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})

    @mock.patch.object(
        compute_api.API, 'create',
        side_effect=exception.VolumeNotFound(volume_id='foo'))
    def test_create_instance_with_volume_not_found_error(self, create_mock):
        # VolumeNotFound (not InvalidVolume, as the original comment said)
        # is translated to a 400 error.
        self.assertRaises(
            webob.exc.HTTPBadRequest, self._test_create_extra, {})
class ServersViewBuilderTest(test.TestCase):
    """Tests for views.servers.ViewBuilder basic/show output against a
    stubbed ACTIVE instance with a populated network info cache.
    """
    project_id = fakes.FAKE_PROJECT_ID
    def setUp(self):
        """Stub networking, secgroups, BDMs and instance mappings, then
        build the fake instance and the self/bookmark links used below.
        """
        super(ServersViewBuilderTest, self).setUp()
        fakes.stub_out_nw_api(self)
        self.flags(group='glance', api_servers=['http://localhost:9292'])
        nw_cache_info = self._generate_nw_cache_info()
        db_inst = fakes.stub_instance(
            id=1,
            image_ref="5",
            uuid=FAKE_UUID,
            display_name="test_server",
            include_fake_metadata=False,
            availability_zone='nova',
            nw_cache=nw_cache_info,
            launched_at=None,
            terminated_at=None,
            task_state=None,
            vm_state=vm_states.ACTIVE,
            power_state=1)
        fakes.stub_out_secgroup_api(
            self, security_groups=[{'name': 'default'}])
        # BDM and instance-mapping lookups are faked at the DB layer so the
        # view builder's volume/cell queries need no real database.
        self.stub_out('nova.db.api.'
                      'block_device_mapping_get_all_by_instance_uuids',
                      fake_bdms_get_all_by_instance_uuids)
        self.stub_out('nova.objects.InstanceMappingList.'
                      '_get_by_instance_uuids_from_db',
                      fake_get_inst_mappings_by_instance_uuids_from_db)
        self.uuid = db_inst['uuid']
        self.view_builder = views.servers.ViewBuilder()
        self.request = fakes.HTTPRequestV21.blank("/%s" % self.project_id)
        self.request.context = context.RequestContext('fake', self.project_id)
        self.instance = fake_instance.fake_instance_obj(
            self.request.context,
            expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
            **db_inst)
        self.self_link = "http://localhost/v2/%s/servers/%s" % (
            self.project_id, self.uuid)
        self.bookmark_link = "http://localhost/%s/servers/%s" % (
            self.project_id, self.uuid)
    def _generate_nw_cache_info(self):
        """Return a three-port network info cache: two ports on network
        'test1' (one carrying an IPv6 subnet) and one port on 'test2'.
        """
        fixed_ipv4 = ('192.168.1.100', '192.168.2.100', '192.168.3.100')
        fixed_ipv6 = ('2001:db8:0:1::1',)
        def _ip(ip):
            return {'address': ip, 'type': 'fixed'}
        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'test1',
                         'subnets': [{'cidr': '192.168.1.0/24',
                                      'ips': [_ip(fixed_ipv4[0])]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(fixed_ipv6[0])]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'test1',
                         'subnets': [{'cidr': '192.168.2.0/24',
                                      'ips': [_ip(fixed_ipv4[1])]}]}},
            {'address': 'cc:cc:cc:cc:cc:cc',
             'id': 3,
             'network': {'bridge': 'br0',
                         'id': 2,
                         'label': 'test2',
                         'subnets': [{'cidr': '192.168.3.0/24',
                                      'ips': [_ip(fixed_ipv4[2])]}]}}]
        return nw_cache
    def test_get_flavor_valid_flavor(self):
        """_get_flavor returns the flavor id plus a bookmark link."""
        flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
        expected = {"id": "1",
                    "links": [{"rel": "bookmark",
                               "href": flavor_bookmark}]}
        result = self.view_builder._get_flavor(self.request, self.instance,
                                               False)
        self.assertEqual(result, expected)
    @mock.patch('nova.context.scatter_gather_cells')
    def test_get_volumes_attached_with_faily_cells(self, mock_sg):
        """BDM collection tolerates a failing cell and uses the healthy
        cell's results.
        """
        bdms = fake_bdms_get_all_by_instance_uuids()
        # just faking a nova list scenario
        mock_sg.return_value = {
            uuids.cell1: bdms[0],
            uuids.cell2: exception.BDMNotFound(id='fake')
        }
        ctxt = context.RequestContext('fake', fakes.FAKE_PROJECT_ID)
        result = self.view_builder._get_instance_bdms_in_multiple_cells(
            ctxt, [self.instance.uuid])
        # will get the result from cell1
        self.assertEqual(result, bdms[0])
        mock_sg.assert_called_once()
    def test_build_server(self):
        """basic() renders only id, name and links."""
        expected_server = {
            "server": {
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.basic(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_with_project_id(self):
        """basic() output is unchanged when the URL carries a project id."""
        expected_server = {
            "server": {
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.basic(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail(self):
        """show() renders the full detail view for an ACTIVE instance."""
        image_bookmark = "http://localhost/%s/images/5" % self.project_id
        flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'default'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_fault(self):
        """show() includes a fault dict for an ERROR instance."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context, self.uuid)
        image_bookmark = "http://localhost/%s/images/5" % self.project_id
        flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "name": "test_server",
                "status": "ERROR",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "fault": {
                    "code": 404,
                    "created": "2010-10-10T12:00:00Z",
                    "message": "HTTPNotFound",
                    "details": "Stock details for test",
                },
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'default'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ERROR,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        self.request.context = context.RequestContext('fake', self.project_id)
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_fault_that_has_been_deleted(self):
        """Deleted servers render as DELETED regardless of vm_state, with
        the fault still included.
        """
        self.instance['deleted'] = 1
        self.instance['vm_state'] = vm_states.ERROR
        fault = fake_instance.fake_fault_obj(self.request.context,
                                             self.uuid, code=500,
                                             message="No valid host was found")
        self.instance['fault'] = fault
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "No valid host was found"}
        self.request.context = context.RequestContext('fake', self.project_id)
        output = self.view_builder.show(self.request, self.instance)
        # Regardless of vm_state deleted servers should be DELETED
        self.assertEqual("DELETED", output['server']['status'])
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_build_server_detail_with_fault_no_instance_mapping(self,
                                                                mock_im):
        """show() copes with a missing instance mapping while loading the
        fault for an ERROR instance.
        """
        self.instance['vm_state'] = vm_states.ERROR
        mock_im.side_effect = exception.InstanceMappingNotFound(uuid='foo')
        self.request.context = context.RequestContext('fake', self.project_id)
        self.view_builder.show(self.request, self.instance)
        mock_im.assert_called_once_with(mock.ANY, self.uuid)
    @mock.patch('nova.objects.InstanceMapping.get_by_instance_uuid')
    def test_build_server_detail_with_fault_loaded(self, mock_im):
        """A pre-loaded fault avoids the instance-mapping lookup."""
        self.instance['vm_state'] = vm_states.ERROR
        fault = fake_instance.fake_fault_obj(self.request.context,
                                             self.uuid, code=500,
                                             message="No valid host was found")
        self.instance['fault'] = fault
        self.request.context = context.RequestContext('fake', self.project_id)
        self.view_builder.show(self.request, self.instance)
        self.assertFalse(mock_im.called)
    def test_build_server_detail_with_fault_no_details_not_admin(self):
        """Non-admin users do not see fault details for a 500 fault."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context,
            self.uuid,
            code=500,
            message='Error')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error"}
        self.request.context = context.RequestContext('fake', self.project_id)
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_admin(self):
        """Admin users see fault details for a 500 fault."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context,
            self.uuid,
            code=500,
            message='Error')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error",
                          'details': 'Stock details for test'}
        self.request.environ['nova.context'].is_admin = True
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_no_details_admin(self):
        """An empty details string is omitted even for admins."""
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context,
            self.uuid,
            code=500,
            message='Error',
            details='')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error"}
        self.request.environ['nova.context'].is_admin = True
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_but_active(self):
        """A fault on an ACTIVE instance is not rendered."""
        self.instance['vm_state'] = vm_states.ACTIVE
        self.instance['progress'] = 100
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context, self.uuid)
        output = self.view_builder.show(self.request, self.instance)
        self.assertNotIn('fault', output['server'])
    def test_build_server_detail_active_status(self):
        """show() reflects ACTIVE status and 100% progress."""
        # set the power state of the instance to running
        self.instance['vm_state'] = vm_states.ACTIVE
        self.instance['progress'] = 100
        image_bookmark = "http://localhost/%s/images/5" % self.project_id
        flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 100,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'default'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_metadata(self):
        """show() renders instance metadata as a plain dict."""
        metadata = []
        metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
        metadata = nova_utils.metadata_to_dict(metadata)
        self.instance['metadata'] = metadata
        image_bookmark = "http://localhost/%s/images/5" % self.project_id
        flavor_bookmark = "http://localhost/%s/flavors/1" % self.project_id
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {"Open": "Stack"},
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "accessIPv4": '',
                "accessIPv6": '',
                "OS-EXT-AZ:availability_zone": "nova",
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'default'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1'},
                    {'id': 'some_volume_2'},
                ]
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
class ServersViewBuilderTestV269(ServersViewBuilderTest):
"""Server ViewBuilder test for microversion 2.69
The intent here is simply to verify that when showing server details
after microversion 2.69 the response could have missing keys for those
servers from the down cells.
"""
wsgi_api_version = '2.69'
    def setUp(self):
        """Use a fresh ViewBuilder and cap supported microversions at 2.69."""
        super(ServersViewBuilderTestV269, self).setUp()
        self.view_builder = views.servers.ViewBuilder()
        self.ctxt = context.RequestContext('fake', self.project_id)
        def fake_is_supported(req, min_version="2.1", max_version="2.69"):
            # Report any version in [min_version, max_version] as supported.
            return (fakes.api_version.APIVersionRequest(max_version) >=
                    req.api_version_request >=
                    fakes.api_version.APIVersionRequest(min_version))
        self.stub_out('nova.api.openstack.api_version_request.is_supported',
                      fake_is_supported)
def req(self, url, use_admin_context=False):
return fakes.HTTPRequest.blank(url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
    def test_get_server_list_detail_with_down_cells(self):
        """A detail listing renders full data for the reachable instance
        and a minimal UNKNOWN-status record for the one from a down cell.
        """
        # Fake out 1 partially constructed instance and one full instance.
        self.instances = [
            self.instance,
            objects.Instance(
                context=self.ctxt,
                uuid=uuids.fake1,
                project_id=fakes.FAKE_PROJECT_ID,
                created_at=datetime.datetime(1955, 11, 5)
            )
        ]
        req = self.req('/%s/servers/detail' % self.project_id)
        output = self.view_builder.detail(req, self.instances, True)
        self.assertEqual(2, len(output['servers']))
        image_bookmark = "http://localhost/%s/images/5" % self.project_id
        expected = {
            "servers": [{
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    'disk': 1,
                    'ephemeral': 1,
                    'vcpus': 1,
                    'ram': 256,
                    'original_name': 'flavor1',
                    'extra_specs': {},
                    'swap': 0
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 4, 'addr': '192.168.2.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'}
                    ],
                    'test2': [
                        {'version': 4, 'addr': '192.168.3.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'cc:cc:cc:cc:cc:cc'},
                    ]
                },
                "metadata": {},
                "tags": [],
                "links": [
                    {
                        "rel": "self",
                        "href": self.self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": self.bookmark_link,
                    },
                ],
                "OS-DCF:diskConfig": "MANUAL",
                "OS-EXT-SRV-ATTR:root_device_name": None,
                "accessIPv4": '',
                "accessIPv6": '',
                "host_status": '',
                "OS-EXT-SRV-ATTR:user_data": None,
                "trusted_image_certificates": None,
                "OS-EXT-AZ:availability_zone": "nova",
                "OS-EXT-SRV-ATTR:kernel_id": '',
                "OS-EXT-SRV-ATTR:reservation_id": '',
                "config_drive": None,
                "OS-EXT-SRV-ATTR:host": None,
                "OS-EXT-SRV-ATTR:hypervisor_hostname": None,
                "OS-EXT-SRV-ATTR:hostname": 'test_server',
                "OS-EXT-SRV-ATTR:instance_name": "instance-00000001",
                "key_name": '',
                "locked": False,
                "description": None,
                "OS-SRV-USG:launched_at": None,
                "OS-SRV-USG:terminated_at": None,
                "security_groups": [{'name': 'default'}],
                "OS-EXT-STS:task_state": None,
                "OS-EXT-STS:vm_state": vm_states.ACTIVE,
                "OS-EXT-STS:power_state": 1,
                "OS-EXT-SRV-ATTR:launch_index": 0,
                "OS-EXT-SRV-ATTR:ramdisk_id": '',
                "os-extended-volumes:volumes_attached": [
                    {'id': 'some_volume_1', 'delete_on_termination': True},
                    {'id': 'some_volume_2', 'delete_on_termination': False},
                ]
            },
            {
                'created': '1955-11-05T00:00:00Z',
                'id': uuids.fake1,
                'tenant_id': fakes.FAKE_PROJECT_ID,
                "status": "UNKNOWN",
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/%s/servers/%s" %
                                (self.project_id, uuids.fake1),
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/%s/servers/%s" %
                                (self.project_id, uuids.fake1),
                    },
                ],
            }]
        }
        self.assertThat(output, matchers.DictMatches(expected))
def test_get_server_list_with_down_cells(self):
    """Index view emits a minimal "UNKNOWN" record for a down-cell instance.

    One fully populated instance and one partial instance (only identity
    and created_at set) are passed to the view builder with
    cell_down_support enabled; the partial one must render with just
    created/id/tenant_id/status/links instead of raising.
    """
    # Fake out 1 partially constructued instance and one full instance.
    self.instances = [
        self.instance,
        objects.Instance(
            context=self.ctxt,
            uuid=uuids.fake1,
            project_id=fakes.FAKE_PROJECT_ID,
            created_at=datetime.datetime(1955, 11, 5)
        )
    ]
    req = self.req('/%s/servers' % self.project_id)
    # Third positional arg is cell_down_support=True.
    output = self.view_builder.index(req, self.instances, True)
    self.assertEqual(2, len(output['servers']))
    expected = {
        "servers": [{
            "id": self.uuid,
            "name": "test_server",
            "links": [
                {
                    "rel": "self",
                    "href": self.self_link,
                },
                {
                    "rel": "bookmark",
                    "href": self.bookmark_link,
                },
            ]
        },
        {
            # Down-cell entry: status is hard-coded UNKNOWN and only the
            # identity fields recoverable from the API DB are present.
            'id': uuids.fake1,
            "status": "UNKNOWN",
            "links": [
                {
                    "rel": "self",
                    "href": "http://localhost/v2/%s/servers/%s" %
                            (self.project_id, uuids.fake1),
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/%s/servers/%s" %
                            (self.project_id, uuids.fake1),
                },
            ],
        }]
    }
    self.assertThat(output, matchers.DictMatches(expected))
def test_get_server_with_down_cells(self):
    """Show view renders the degraded representation for a down cell.

    The partial instance carries the handful of fields recoverable from
    the request spec / instance mapping; everything else is omitted and
    status becomes "UNKNOWN".
    """
    # Fake out 1 partially constructued instance.
    self.instance = objects.Instance(
        context=self.ctxt,
        uuid=self.uuid,
        project_id=self.instance.project_id,
        created_at=datetime.datetime(1955, 11, 5),
        user_id=self.instance.user_id,
        image_ref=self.instance.image_ref,
        power_state=0,
        flavor=self.instance.flavor,
        availability_zone=self.instance.availability_zone
    )
    req = self.req('/%s/servers/%s' % (self.project_id, FAKE_UUID))
    output = self.view_builder.show(req, self.instance,
                                    cell_down_support=True)
    # ten fields from request_spec and instance_mapping
    self.assertEqual(10, len(output['server']))
    image_bookmark = "http://localhost/%s/images/5" % self.project_id
    expected = {
        "server": {
            "id": self.uuid,
            "user_id": "fake_user",
            "tenant_id": "fake_project",
            "created": '1955-11-05T00:00:00Z',
            "status": "UNKNOWN",
            "image": {
                "id": "5",
                "links": [
                    {
                        "rel": "bookmark",
                        "href": image_bookmark,
                    },
                ],
            },
            "flavor": {
                'disk': 1,
                'ephemeral': 1,
                'vcpus': 1,
                'ram': 256,
                'original_name': 'flavor1',
                'extra_specs': {},
                'swap': 0
            },
            "OS-EXT-AZ:availability_zone": "nova",
            "OS-EXT-STS:power_state": 0,
            "links": [
                {
                    "rel": "self",
                    "href": "http://localhost/v2/%s/servers/%s" %
                            (self.project_id, self.uuid),
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/%s/servers/%s" %
                            (self.project_id, self.uuid),
                },
            ]
        }
    }
    self.assertThat(output, matchers.DictMatches(expected))
def test_get_server_without_image_avz_user_id_set_from_down_cells(self):
    """Down-cell show view substitutes placeholders for unknown fields.

    When user_id, image_ref and availability_zone are unset on the
    partial instance, the view falls back to "UNKNOWN" (and "" for the
    image) rather than omitting the keys.
    """
    # Fake out 1 partially constructued instance.
    self.instance = objects.Instance(
        context=self.ctxt,
        uuid=self.uuid,
        project_id=self.instance.project_id,
        created_at=datetime.datetime(1955, 11, 5),
        user_id=None,
        image_ref=None,
        power_state=0,
        flavor=self.instance.flavor,
        availability_zone=None
    )
    req = self.req('/%s/servers/%s' % (self.project_id, FAKE_UUID))
    output = self.view_builder.show(req, self.instance,
                                    cell_down_support=True)
    # ten fields from request_spec and instance_mapping
    # (the original comment said "nine", but the assertion below and the
    # expected dict both count ten keys)
    self.assertEqual(10, len(output['server']))
    expected = {
        "server": {
            "id": self.uuid,
            "user_id": "UNKNOWN",
            "tenant_id": "fake_project",
            "created": '1955-11-05T00:00:00Z',
            "status": "UNKNOWN",
            "image": "",
            "flavor": {
                'disk': 1,
                'ephemeral': 1,
                'vcpus': 1,
                'ram': 256,
                'original_name': 'flavor1',
                'extra_specs': {},
                'swap': 0
            },
            "OS-EXT-AZ:availability_zone": "UNKNOWN",
            "OS-EXT-STS:power_state": 0,
            "links": [
                {
                    "rel": "self",
                    "href": "http://localhost/v2/%s/servers/%s" %
                            (self.project_id, self.uuid),
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/%s/servers/%s" %
                            (self.project_id, self.uuid),
                },
            ]
        }
    }
    self.assertThat(output, matchers.DictMatches(expected))
class ServersAllExtensionsTestCase(test.TestCase):
    """Servers tests using default API router with all extensions enabled.

    The intent here is to catch cases where extensions end up throwing
    an exception because of a malformed request before the core API
    gets a chance to validate the request and return a 422 response.

    For example, AccessIPsController extends servers.Controller::

        | @wsgi.extends
        | def create(self, req, resp_obj, body):
        |     context = req.environ['nova.context']
        |     if authorize(context) and 'server' in resp_obj.obj:
        |         resp_obj.attach(xml=AccessIPTemplate())
        |         server = resp_obj.obj['server']
        |         self._extend_server(req, server)

    we want to ensure that the extension isn't barfing on an invalid
    body.

    NOTE(review): the docstring mentions a 422 response, but both tests
    below assert 400 — presumably the docstring predates the v2.1 schema
    validation behavior; confirm and update the prose if so.
    """

    def setUp(self):
        super(ServersAllExtensionsTestCase, self).setUp()
        # Full v2.1 router so every extension participates in the request.
        self.app = compute.APIRouterV21()

    # The compute API is mocked to explode: reaching it would mean the
    # malformed body slipped past validation.
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=test.TestingException(
                           "Should not reach the compute API."))
    def test_create_missing_server(self, mock_create):
        # Test create with malformed body.
        req = fakes.HTTPRequestV21.blank(
            '/%s/servers' % fakes.FAKE_PROJECT_ID)
        req.method = 'POST'
        req.content_type = 'application/json'
        body = {'foo': {'a': 'b'}}
        req.body = jsonutils.dump_as_bytes(body)
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)

    def test_update_missing_server(self):
        # Test update with malformed body.
        req = fakes.HTTPRequestV21.blank(
            '/%s/servers/1' % fakes.FAKE_PROJECT_ID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'foo': {'a': 'b'}}
        req.body = jsonutils.dump_as_bytes(body)
        # Instance.save must never run for a request rejected by validation.
        with mock.patch('nova.objects.Instance.save') as mock_save:
            res = req.get_response(self.app)
            self.assertFalse(mock_save.called)
        self.assertEqual(400, res.status_int)
class ServersInvalidRequestTestCase(test.TestCase):
    """Tests of places we throw 400 Bad Request from."""

    def setUp(self):
        super(ServersInvalidRequestTestCase, self).setUp()
        self.controller = servers.ServersController()

    def _invalid_server_create(self, body):
        # POST with the given body must be rejected by schema validation.
        req = fakes.HTTPRequestV21.blank(
            '/%s/servers' % fakes.FAKE_PROJECT_ID)
        req.method = 'POST'
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)

    def test_create_server_no_body(self):
        self._invalid_server_create(body=None)

    def test_create_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._invalid_server_create(body=body)

    def test_create_server_malformed_entity(self):
        body = {'server': 'string'}
        self._invalid_server_create(body=body)

    def _unprocessable_server_update(self, body):
        # PUT counterpart of the helper above.
        # NOTE(review): this helper is never called — the three "update"
        # tests below invoke _invalid_server_create instead (likely a
        # copy-paste slip), so the update path is not actually exercised
        # and they merely repeat the create tests. Confirm whether
        # controller.update raises HTTPBadRequest or ValidationError for
        # these bodies before rewiring them to this helper.
        req = fakes.HTTPRequestV21.blank(
            '/%s/servers/%s' % (fakes.FAKE_PROJECT_ID, FAKE_UUID))
        req.method = 'PUT'
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.update, req, FAKE_UUID, body=body)

    def test_update_server_no_body(self):
        self._invalid_server_create(body=None)

    def test_update_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._invalid_server_create(body=body)

    def test_create_update_malformed_entity(self):
        body = {'server': 'string'}
        self._invalid_server_create(body=body)
class ServersActionsJsonTestV239(test.NoDBTestCase):
    """Checks microversion 2.39 behavior of the createImage action."""

    def setUp(self):
        super(ServersActionsJsonTestV239, self).setUp()
        self.controller = servers.ServersController()
        self.req = fakes.HTTPRequest.blank('', version='2.39')

    @mock.patch.object(common, 'check_img_metadata_properties_quota')
    @mock.patch.object(common, 'get_instance')
    def test_server_create_image_no_quota_checks(self, mock_get_instance,
                                                 mock_check_quotas):
        # 'mock_get_instance' helps to skip the whole logic of the action,
        # but to make the test
        mock_get_instance.side_effect = webob.exc.HTTPNotFound
        body = {
            'createImage': {
                'name': 'Snapshot 1',
            },
        }
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._action_create_image, self.req,
                          FAKE_UUID, body=body)
        # starting from version 2.39 no quota checks on Nova side are performed
        # for 'createImage' action after removing 'image-metadata' proxy API
        mock_check_quotas.assert_not_called()
|
import React, { useState } from 'react'
import { NavLink } from 'react-router-dom'
import {
Container,
Wrapper,
Text,
InputGroup,
Input,
Button,
Correct,
Error,
Message,
} from '../../styles/Form.element'
import { Loader } from '../../components/imports'
import { useAuth } from '../../global/exports'
const _MailReset = () => {
const { loading, emailReset, message, setMessage } = useAuth()
const [email, setEmail] = useState('')
const [validationEmail, setValidationEmail] = useState({
validate1: false,
validate2: false,
})
const handleEmailValdiation = (e) => {
setEmail(e.target.value)
setMessage({
type: '',
content: '',
})
email.length <= 1 &&
setValidationEmail({
validate1: true,
validate2: false,
})
email.length > 1 &&
setValidationEmail({
validate1: false,
validate2: true,
})
}
const handleSubmission = () => {
if (validationEmail.validate2) {
emailReset(email)
} else {
setMessage({
type: 'error',
content: 'Please Enter your Infos',
})
}
}
const CorrectStyled = {
background: ({ theme }) => theme.correct,
}
const WrongStyled = {
background: ({ theme }) => theme.error,
}
return (
<>
{' '}
<Container>
<Wrapper>
<Text to='/'>medEspoir</Text>
{message.type === 'error' && (
<Message style={WrongStyled}>{message.content}</Message>
)}
{message.type === 'success' && (
<Message style={CorrectStyled}>{message.content}</Message>
)}
<InputGroup className='fieldgroup'>
<Input
type='text'
autoComplete='no'
placeholder='E-mail'
onChange={handleEmailValdiation}
/>
{validationEmail.validate1 && <Error />}
{validationEmail.validate2 && <Correct />}
</InputGroup>
<InputGroup>
<Button disabled={loading} onClick={handleSubmission}>
{loading ? <Loader /> : 'Send Mail'}
</Button>
</InputGroup>
<InputGroup style={{ flexDirection: 'column', alignItems: 'center' }}>
<p>
To access you account? <NavLink to='/login'>Login</NavLink> here
</p>
</InputGroup>
</Wrapper>
</Container>
</>
)
}
export default _MailReset
|
import {
dateToString
} from 'shlack/utils/date';
import {
module,
test
} from 'qunit';
module('Unit | Utility | date', function () {
  // Replace this with your real tests.
  // Happy-path parsing: dateToString should normalize a variety of date
  // string formats into the app's "MMM D, YYYY hh:mm.ss AM/PM" shape.
  test('string inputs', function (assert) {
    assert.equal(
      dateToString('04/05/1983'),
      'Apr 5, 1983 00:00.00 AM',
      'MM/DD/YYYY'
    );
    assert.equal(
      dateToString('4/5/1983'),
      'Apr 5, 1983 00:00.00 AM',
      'M/D/YYYY'
    );
    assert.equal(
      dateToString('26 June 2010 13:14'),
      'Jun 26, 2010 01:14.00 PM',
      '26 June 2010 13:14'
    );
  });

  // Non-date inputs must yield null rather than throw; @ts-ignore silences
  // the deliberate type violations.
  test('empty and invalid inputs', function (assert) {
    // @ts-ignore
    assert.equal(dateToString(), null);
    // @ts-ignore
    assert.equal(dateToString(null), null);
    // @ts-ignore
    assert.equal(dateToString([]), null);
    // @ts-ignore
    assert.equal(dateToString({}), null);
  });
});
|
#include <string.h>
#include "sha384.h"
extern void crypto_sha384_sha512_init(sha512_ctx_t *ctx, int is_384);
/*
* SHA-384 process init
*/
/* Initialize *ctx for a SHA-384 computation.
 * SHA-384 shares the SHA-512 compression machinery; the second argument
 * (is_384 = 1) selects the SHA-384 initial hash values and 48-byte digest. */
void crypto_sha384_init( sha512_ctx_t *ctx )
{
    crypto_sha384_sha512_init(ctx, 1);
}
/*
* SHA-384 process buffer
*/
/* Absorb `len` bytes from `data` into the running SHA-384 state.
 * Pure pass-through: the update step is identical to SHA-512's. */
void crypto_sha384_update( sha512_ctx_t *ctx,
                           const uint8_t *data,
                           uint32_t len )
{
    crypto_sha512_update(ctx, data, len);
}
/*
* SHA-384 final digest
*/
/* Finalize the computation and write the digest to `digest`.
 * The shared SHA-512 finalizer emits 48 bytes when the context was
 * initialized in SHA-384 mode — `digest` must have room for that. */
void crypto_sha384_final( sha512_ctx_t *ctx,
                          uint8_t *digest )
{
    crypto_sha512_final(ctx, digest);
}
|
import Document, {
Html, Head, Main, NextScript,
} from 'next/document';
// Custom Next.js Document: injects fonts, favicons, manifest, and the
// Google Analytics (gtag.js) bootstrap into every server-rendered page.
export default class MyDocument extends Document {
  render() {
    // GA measurement ID comes from the GTAG env var at build/runtime.
    const GA_TRACKING_ID = process.env.GTAG;
    // NOTE(review): scrollBehavior is set to the empty string — likely
    // intended to be 'smooth' (or omitted entirely); confirm.
    return (
      <Html lang="en" style={{ scrollBehavior: '' }}>
        <Head>
          <link href="https://fonts.googleapis.com/css2?family=Raleway&family=Playfair+Display:ital,wght@0,500;1,400&display=swap" rel="stylesheet" />
          <link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
          <link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
          <link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
          <link rel="manifest" href="/site.webmanifest" />
          <script
            async
            src={`https://www.googletagmanager.com/gtag/js?id=${GA_TRACKING_ID}`}
          />
          <script
            // eslint-disable-next-line react/no-danger
            dangerouslySetInnerHTML={{
              __html: `
            window.dataLayer = window.dataLayer || [];
            function gtag(){dataLayer.push(arguments);}
            gtag('js', new Date());
            gtag('config', '${GA_TRACKING_ID}', {
              page_path: window.location.pathname,
            });
          `,
            }}
          />
        </Head>
        <body>
          <Main />
          <NextScript />
        </body>
      </Html>
    );
  }
}
|
#!/usr/bin/env python
import rospy
from nav_msgs.msg import OccupancyGrid
from std_msgs.msg import Int16
from geometry_msgs.msg import Twist
from nav_msgs.msg import Odometry
from sensor_msgs.msg import LaserScan
import math
import tf
import numpy as np
from std_msgs.msg import String
## developping...
def distance_dot2line(a, b, c, x0, y0):
    """Return the perpendicular distance from point (x0, y0) to the
    line a*x + b*y + c = 0."""
    numerator = abs(a * x0 + b * y0 + c)
    return numerator / math.sqrt(a * a + b * b)
def distance_dot2dot(x1, y1, x2, y2):
    """Return the Euclidean distance between (x1, y1) and (x2, y2)."""
    dx = x2 - x1
    dy = y2 - y1
    return math.sqrt(dx * dx + dy * dy)
def theta_dot2dot(start, end):
    """Return the heading angle (radians, atan2 convention) of the
    vector pointing from `start` to `end` (each an (x, y) pair)."""
    dx = end[0] - start[0]
    dy = end[1] - start[1]
    return math.atan2(dy, dx)
def euler_from_quaternion(rot):
    """Return the yaw (Z euler angle, radians) of quaternion `rot`.

    `rot` is an (x, y, z, w) sequence as produced by tf; index [2] of
    the euler triple is the rotation about Z.
    """
    # The original bound `rot` to a redundant local first; pass it through.
    return tf.transformations.euler_from_quaternion(rot)[2]
def sign(num):
    """Return -1 for negative input, +1 otherwise (zero maps to +1)."""
    return -1 if num < 0 else 1
class Orientation(object):
    """2-D pose: position (x, y) plus yaw, built from a tf (trans, rot) pair.

    `trans` is an (x, y, z) translation; `rot` is an (x, y, z, w)
    quaternion from which only the yaw is kept.
    """
    def __init__(self, trans, rot):
        self.x = trans[0]
        self.y = trans[1]
        # Yaw in radians, extracted by the module-level helper.
        self.theta = euler_from_quaternion(rot)
class Maze_pathfinder():
    """Wall-following maze solver for a ROS robot.

    State machine driven by /odom callbacks: drive to a start point, then
    follow the left-hand wall (priority order depends on current heading)
    until an exit gap is detected, then drive through it. Written for
    Python 2 / rospy (note the `print` statement below).
    """
    def __init__(self):
        # NOTE(review): self._sub is overwritten — the /odom subscriber
        # handle is lost when the /scan subscriber is assigned. rospy keeps
        # both callbacks alive anyway, but the handles should use distinct
        # names.
        self._sub = rospy.Subscriber('/odom', Odometry, self.callback2, queue_size=1)
        self._sub = rospy.Subscriber('/scan', LaserScan, self.callback3, queue_size=1)
        self._sub_2 = rospy.Subscriber('/command_maze', String, self.receiver_from_core, queue_size=1)
        self._pub = rospy.Publisher('/cmd_vel', Twist, queue_size=1)
        self._pub2 = rospy.Publisher('/maze', String, queue_size=1)
        self.state = 'setting_start_and_goal' # path_finding, stop, going, direction_setting
        # variables used in maze solve
        self.sell_size = 0.1
        self.car_size = 0.1
        # variables used in move to enterance and exit
        self.position_now = None      # [x, y] from /odom
        self.theta_now = None         # yaw (radians) from /odom
        self.standard_theta = None    # reference heading fixed at start
        self.scan = None              # 360-entry range array (see callback3)
        self.start_point = None
        self.exit_point = None
        self.exit_point2 = None
        self.current_direction = None # 'top'/'left'/'bottom'/'right' in maze frame
        self.theta_from_direction = {'right':0, 'top':np.pi/2, 'left':np.pi, 'bottom':np.pi*3/2}
        self.angle_from_direction = {'right':0, 'top':90, 'left':180, 'bottom':270}
        self.moving_point = None      # current short-hop target while 'moving'

    def callback2(self, odometry):
        """Main state machine, ticked on every /odom message."""
        self.position_now = [odometry.pose.pose.position.x, odometry.pose.pose.position.y]
        quaternion = (odometry.pose.pose.orientation.x, odometry.pose.pose.orientation.y, odometry.pose.pose.orientation.z, odometry.pose.pose.orientation.w)
        self.theta_now = euler_from_quaternion(quaternion)
        print self.state
        if self.state == "setting_start_and_goal":
            # Fix the maze reference frame relative to the initial heading and
            # pick a start point 0.3 m ahead of the robot.
            self.standard_theta = self.theta_now - np.pi/2
            self.start_point = [self.position_now[0] + math.cos(self.standard_theta + np.pi/2) * 0.3, self.position_now[1] + math.sin(self.standard_theta + np.pi/2) * 0.3]
            self.state = "move_to_start_point"
        if self.state == "move_to_start_point":
            self.move_to_some_point(self.position_now, self.theta_now, self.start_point)
            distance_remain = distance_dot2dot(self.position_now[0], self.position_now[1], self.start_point[0], self.start_point[1])
            if distance_remain < 0.02:
                self.current_direction = "top"
                self.state = "path_finding" # now maze solve start!!
        if self.state == "path_finding":
            # Only start looking for the exit once we are well away (>1.5 m in
            # both axes) from the start point.
            if abs(self.start_point[0] - self.position_now[0]) > 1.5 and abs(self.start_point[1] - self.position_now[1]) > 1.5:
                self.check_exit()
                if self.state == "move_to_exit":
                    return
            # Left-hand wall following: for each heading, try turns in a fixed
            # priority order (e.g. heading 'top' prefers right, then straight,
            # then left, then back).
            if self.current_direction == "top":
                obstacle = self.obs_check("right", self.current_direction)
                if obstacle == "no":
                    self.state = "moving"
                    self.current_direction = "right"
                else:
                    obstacle = self.obs_check("top", self.current_direction)
                    if obstacle == "no":
                        self.state = "moving"
                        self.current_direction = "top"
                    else:
                        obstacle = self.obs_check("left", self.current_direction)
                        if obstacle == "no":
                            self.state = "moving"
                            self.current_direction = "left"
                        else:
                            self.state = "moving"
                            self.current_direction = "bottom"
            elif self.current_direction == "left":
                obstacle = self.obs_check("top", self.current_direction)
                if obstacle == "no":
                    self.state = "moving"
                    self.current_direction = "top"
                else:
                    obstacle = self.obs_check("left", self.current_direction)
                    if obstacle == "no":
                        self.state = "moving"
                        self.current_direction = "left"
                    else:
                        obstacle = self.obs_check("bottom", self.current_direction)
                        if obstacle == "no":
                            self.state = "moving"
                            self.current_direction = "bottom"
                        else:
                            self.state = "moving"
                            self.current_direction = "right"
            elif self.current_direction == "bottom":
                obstacle = self.obs_check("left", self.current_direction)
                if obstacle == "no":
                    self.state = "moving"
                    self.current_direction = "left"
                else:
                    obstacle = self.obs_check("bottom", self.current_direction)
                    if obstacle == "no":
                        self.state = "moving"
                        self.current_direction = "bottom"
                    else:
                        obstacle = self.obs_check("right", self.current_direction)
                        if obstacle == "no":
                            self.state = "moving"
                            self.current_direction = "right"
                        else:
                            self.state = "moving"
                            self.current_direction = "top"
            elif self.current_direction == "right":
                obstacle = self.obs_check("bottom", self.current_direction)
                if obstacle == "no":
                    self.state = "moving"
                    self.current_direction = "bottom"
                else:
                    obstacle = self.obs_check("right", self.current_direction)
                    if obstacle == "no":
                        self.state = "moving"
                        self.current_direction = "right"
                    else:
                        obstacle = self.obs_check("top", self.current_direction)
                        if obstacle == "no":
                            self.state = "moving"
                            self.current_direction = "top"
                        else:
                            self.state = "moving"
                            self.current_direction = "left"
        if self.state == "moving":
            self.move()
        if self.state == "move_to_exit":
            self.move_to_some_point(self.position_now, self.theta_now, self.exit_point)
            if distance_dot2dot(self.position_now[0], self.position_now[1], self.exit_point[0], self.exit_point[1]) < 0.005:
                self.state = "move_to_exit2"
        if self.state == "move_to_exit2":
            self.move_to_some_point(self.position_now, self.theta_now, self.exit_point2)
            # NOTE(review): this checks distance to exit_point, not
            # exit_point2 — looks like a copy-paste slip; confirm.
            if distance_dot2dot(self.position_now[0], self.position_now[1], self.exit_point[0], self.exit_point[1]) < 0.005:
                self.state = "maze_end"
        self._pub2.publish(self.state)

    def check_exit(self):
        """Scan for a ~0.3 m gap in the wall and, if found, set the two
        exit waypoints (just before and just after the gap)."""
        index1 = 1000
        index2 = 1000
        # A gap edge is a beam that hits a close wall (<0.3) while the next
        # beam sees far (>1).
        # NOTE(review): i+1 can reach 360 and index the scan array out of
        # range, and the %360 on the range endpoints can make a range empty
        # (start > stop) — confirm intended behavior.
        for i in range((90 - self.angle_from_direction[self.current_direction] + 360)%360, (90 - self.angle_from_direction[self.current_direction] + 360 + 90)%360):
            if self.scan[i] <0.3 and self.scan[i+1] > 1:
                index2 = i
        for i in range((180 - self.angle_from_direction[self.current_direction] + 360)%360, (180 - self.angle_from_direction[self.current_direction] + 360 + 90)%360):
            if self.scan[i] <0.3 and self.scan[i+1] > 1:
                index1 = i
        # Convert the two edge beams to world coordinates.
        point1 = [self.position_now[0] + self.scan[index1] * math.cos(index1 * np.pi/180 + self.theta_now), self.position_now[1] + self.scan[index1] * math.sin(index1 * np.pi/180 + self.theta_now)]
        point2 = [self.position_now[0] + self.scan[index2] * math.cos(index2 * np.pi/180 + self.theta_now), self.position_now[1] + self.scan[index2] * math.sin(index2 * np.pi/180 + self.theta_now)]
        angle_desired = self.standard_theta + np.pi/2
        angle_actual = theta_dot2dot(point1, point2)
        # NOTE(review): the second operand applies abs() to a boolean —
        # `abs(0.3 - dist < 0.1)` — almost certainly meant
        # `abs(0.3 - dist) < 0.1`; confirm.
        if abs(angle_actual - angle_desired) < 10 * np.pi /180 and abs(0.3 - distance_dot2dot(point1[0], point1[1], point2[0], point2[1]) < 0.1):
            # NOTE(review): the name suggests a midpoint but there is no /2 —
            # this is the coordinate *sum* of the two points; confirm.
            between_point1_and_point2 = [point1[0] + point2[0], point1[1] + point2[1]]
            self.exit_point = [between_point1_and_point2[0] + math.cos(angle_actual - np.pi/2)*0.1, between_point1_and_point2[1] + math.sin(angle_actual - np.pi/2)*0.1]
            self.exit_point2 = [between_point1_and_point2[0] + math.cos(angle_actual + np.pi/2)*0.1, between_point1_and_point2[1] + math.sin(angle_actual + np.pi/2)*0.1]
            self.state = "move_to_exit"

    def obs_check(self,check_direction, current_direction):
        """Return "yes" if the laser sees an obstacle inside a 0.3 m-wide,
        0.20 m-deep corridor toward `check_direction`; also rasterizes the
        hits into self.cell (6x4 occupancy grid)."""
        self.cell = np.zeros((6,4), np.uint8)
        obstacle = "no"
        for i in range(180):
            # Rotate each beam into the frame of the direction being checked.
            x_pose = math.cos(i*np.pi/180 + self.theta_from_direction[check_direction] - self.theta_from_direction[current_direction])*self.scan[i]
            y_pose = math.sin(i*np.pi/180 + self.theta_from_direction[check_direction] - self.theta_from_direction[current_direction])*self.scan[i]
            if abs(x_pose) < 0.15 and y_pose < 0.20:
                obstacle = "yes"
                if x_pose > 0:
                    x_num = 3 + int(x_pose*20)
                else:
                    x_num = 2 - int(abs(x_pose)*20)
                y_num = int(y_pose*20)
                self.cell[x_num][y_num] = 1
        return obstacle

    def move(self):
        """Advance one 5 cm hop in current_direction, then return to
        path_finding once the hop target is reached."""
        if self.moving_point == None:
            angle = self.theta_from_direction[self.current_direction] + self.standard_theta
            # NOTE(review): `angle` is already in radians, yet it is
            # multiplied by pi/180 again below (degrees-to-radians applied
            # twice) — confirm whether this is intentional.
            self.moving_point = [self.position_now[0] + math.cos(angle*np.pi/180)*0.05, self.position_now[1] + math.sin(angle*np.pi/180)*0.05]
        else:
            self.move_to_some_point(self.position_now, self.theta_now, self.moving_point)
            if distance_dot2dot(self.position_now[0], self.position_now[1], self.moving_point[0], self.moving_point[1]) < 0.003:
                self.stop()
                self.state = "path_finding"
                self.moving_point = None

    def callback3(self, scan):
        """Cache the laser scan rotated so index 0 faces the robot's rear
        right (-90 deg); zero (invalid) readings become 3 m."""
        # NOTE(review): dtype uint8 truncates fractional ranges to whole
        # metres (e.g. 0.3 -> 0), which defeats the <0.3 comparisons used
        # elsewhere — this looks like it should be a float array; confirm.
        self.scan = np.zeros((360), np.uint8)
        for i in range(360):
            if scan.ranges[(i + 270) % 360] != 0:
                self.scan[i] = scan.ranges[(i + 270) % 360]
            else:
                self.scan[i] = 3

    def move_to_some_point(self, position_now, theta_now, position_desired):
        """One control tick toward position_desired: rotate in place until
        roughly aligned, otherwise drive straight."""
        theta_desired = theta_dot2dot(position_now, position_desired)
        diff = abs(theta_desired - theta_now)
        if diff > 2*np.pi:
            diff -= 2*np.pi
        if diff > np.pi/100:
            self.setting_angle(theta_now, theta_desired)
        else:
            self.going_straight()

    def setting_angle(self, theta_now, theta_desired):
        """Publish an angular velocity that turns the shorter way toward
        theta_desired; speed tapers as the error shrinks."""
        if theta_desired < 0:
            theta_desired += np.pi*2
        if theta_desired > theta_now:
            if theta_desired - theta_now < np.pi:
                turn_direction = 'left'
            else:
                turn_direction = 'right'
        else:
            if theta_now - theta_desired < np.pi:
                turn_direction = 'right'
            else:
                turn_direction = 'left'
        # publish topic
        difference = abs(theta_desired - theta_now)
        if difference > np.pi:
            difference = np.pi * 2 - difference
        if difference > 0.1:
            turn_speed = 0.6
        elif difference > 0.01:
            turn_speed = 0.1
        else:
            turn_speed = 0
        if turn_direction == 'left':
            ang_z = turn_speed
        else:
            ang_z = - turn_speed
        self.publishing_vel(0, 0, ang_z, 0, 0, 0)

    def going_straight(self):
        # Constant forward speed, no rotation.
        self.publishing_vel(0, 0, 0, 0.12, 0, 0)

    def stop(self):
        # Zero all velocity components.
        self.publishing_vel(0, 0, 0, 0, 0, 0)

    def publishing_vel(self, angular_x, angular_y, angular_z, linear_x, linear_y, linear_z):
        """Assemble and publish a Twist on /cmd_vel."""
        vel = Twist()
        vel.angular.x = angular_x
        vel.angular.y = angular_y
        vel.angular.z = angular_z
        vel.linear.x = linear_x
        vel.linear.y = linear_y
        vel.linear.z = linear_z
        self._pub.publish(vel)

    def receiver_from_core(self, command):
        # Restart the whole state machine when the core node says go.
        if command.data == "maze_start":
            self.state = "setting_start_and_goal"

    def main(self):
        rospy.spin()
if __name__ == '__main__':
    # Register the node with the ROS master, then block in rospy.spin()
    # servicing the subscriber callbacks until shutdown.
    rospy.init_node('maze_pathfinder')
    mazesolver = Maze_pathfinder()
    mazesolver.main()
|
/**
* dojox - A version of dojox.js framework that ported to running on skylarkjs.
* @author Hudaokeji, Inc.
* @version v0.9.0
* @link https://github.com/skylark-integration/dojox/
* @license MIT
*/
// Minified build artifact (see sourceMappingURL below) of the
// dojox.editor.plugins.AutoUrlLink editor plugin: watches key presses /
// clicks / blur in a dijit editor and wraps freshly typed http/https/ftp
// URLs in anchor tags. Edit the unminified source, not this file.
define(["dojo","dijit","dojox","dijit/_editor/_Plugin","dijit/form/Button","dojo/_base/declare","dojo/string"],function(e,t,n,i){var o=e.declare("dojox.editor.plugins.AutoUrlLink",[i],{_template:"<a _djrealurl='${url}' href='${url}'>${url}</a>",setEditor:function(n){this.editor=n,e.isIE||(e.some(n._plugins,function(e){return!!e.isInstanceOf(t._editor.plugins.EnterKeyHandling)&&(this.blockNodeForEnter=e.blockNodeForEnter,!0)},this),this.connect(n,"onKeyPress","_keyPress"),this.connect(n,"onClick","_recognize"),this.connect(n,"onBlur","_recognize"))},_keyPress:function(t){var n=e.keys,i=t.keyCode,o=t.charCode;o==n.SPACE||t.ctrlKey&&(118==o||86==o)?setTimeout(e.hitch(this,"_recognize"),0):i==n.ENTER?setTimeout(e.hitch(this,function(){this._recognize({enter:!0})}),0):this._saved=this.editor.window.getSelection().anchorNode},_recognize:function(t){var n=this._template,i=!!t&&t.enter,o=this.editor,r=o.window.getSelection();if(console.log("_recognize: isEnter = ",i,", selection is ",r,r.anchorNode,this._findLastEditingNode(r.anchorNode)),r){var s=i?this._findLastEditingNode(r.anchorNode):this._saved||r.anchorNode,a=this._saved=r.anchorNode,d=r.anchorOffset;if(3==s.nodeType&&!this._inLink(s)){var l,c=!1,u=this._findUrls(s,a,d),g=o.document.createRange(),h=0,f=a==s;for(l=u.shift();l;)g.setStart(s,l.start),g.setEnd(s,l.end),r.removeAllRanges(),r.addRange(g),o.execCommand("insertHTML",e.string.substitute(n,{url:g.toString()})),h+=l.end,l=u.shift(),c=!0;if(f&&(d-=h)<=0)return;if(!c)return;try{g.setStart(a,0),g.setEnd(a,d),r.removeAllRanges(),r.addRange(g),o._sCall("collapse",[])}catch(e){}}}},_inLink:function(e){var t=this.editor.editNode,n=!1;for(e=e.parentNode;e&&e!==t;){if("a"==(e.tagName?e.tagName.toLowerCase():"")){n=!0;break}e=e.parentNode}return n},_findLastEditingNode:function(n){var i,o=t.range.BlockTagNames,r=this.editor.editNode;if(!n)return 
n;if("BR"!=this.blockNodeForEnter||(i=t.range.getBlockAncestor(n,null,r).blockNode)&&"LI"==i.tagName.toUpperCase()){for(n=(i||(i=t.range.getBlockAncestor(n,null,r).blockNode))&&"LI"==i.tagName.toUpperCase()?i:t.range.getBlockAncestor(n,null,r).blockNode;(n=n.previousSibling)&&(!n.tagName||!n.tagName.match(o)););if(n)for(n=n.lastChild;n&&(3!=n.nodeType||""==e.trim(n.nodeValue));)n=1==n.nodeType?n.lastChild:n.previousSibling}else for(;(n=n.previousSibling)&&3!=n.nodeType;);return n},_findUrls:function(e,t,n){var i,o,r=/(http|https|ftp):\/\/[^\s]+/gi,s=[],a=0,d=e.nodeValue;for(e===t&&n<d.length&&(d=d.substr(0,n));null!=(i=r.exec(d));)0!=i.index&&" "!=(o=d.charAt(i.index-1))&&" "!=o||(s.push({start:i.index-a,end:i.index+i[0].length-a}),a=i.index+i[0].length);return s}});return e.subscribe(t._scopeName+".Editor.getPlugin",null,function(e){e.plugin||"autourllink"===e.args.name.toLowerCase()&&(e.plugin=new o)}),o});
//# sourceMappingURL=../../sourcemaps/editor/plugins/AutoUrlLink.js.map
|
'use strict';
const fs = require('fs');
const path = require('path');
// Plopfile defining three Airbyte connector scaffolding generators:
// a Java destination, a plain Python source, and a Singer-based Python
// source. Each generator renders Handlebars templates from its input
// directory into ../../connectors/<connector-name>.
module.exports = function (plop) {
  // Template roots (relative to this plopfile).
  const javaDestinationInputRoot = '../java-destination';
  const pythonSourceInputRoot = '../source-python';
  const singerSourceInputRoot = '../source-singer';
  const basesDir = '../../bases';
  const outputDir = '../../connectors';
  // Output paths still contain Handlebars placeholders; plop renders
  // them with the prompt answers at generation time.
  const javaDestinationOutputRoot = `${outputDir}/destination-{{dashCase name}}`
  const pythonSourceOutputRoot = `${outputDir}/source-{{dashCase name}}`
  const singerSourceOutputRoot = `${outputDir}/source-{{dashCase name}}-singer`

  plop.setGenerator('Java Destination', {
    description: 'Generate an Airbyte destination written in Java',
    prompts: [
      {
        type: 'input',
        name: 'name',
        message: 'destination name (without the "destination" suffix; e.g. "my sql")'
      }
    ],
    actions: [
      {
        type: 'add',
        path: `${javaDestinationOutputRoot}/build.gradle`,
        templateFile: `${javaDestinationInputRoot}/build.gradle.hbs`,
      },
      {
        type: 'add',
        path: `${javaDestinationOutputRoot}/src/main/java/io/airbyte/integrations/destination/{{snakeCase name}}/{{properCase name}}Destination.java`,
        templateFile: `${javaDestinationInputRoot}/Destination.java.hbs`,
      },
      {
        type: 'add',
        path: `${javaDestinationOutputRoot}/Dockerfile`,
        templateFile: `${javaDestinationInputRoot}/Dockerfile.hbs`,
      },
      {
        type: 'add',
        path: `${javaDestinationOutputRoot}/.dockerignore`,
        templateFile: `${javaDestinationInputRoot}/.dockerignore.hbs`,
      },
      {
        type: 'add',
        path: `${javaDestinationOutputRoot}/spec.json`,
        templateFile: `${javaDestinationInputRoot}/spec.json.hbs`,
      },
      // A bare string action is printed as a completion message.
      'Your new connector has been created. Happy coding~~',
    ],
  });

  plop.setGenerator('Python Source', {
    description: 'Generate an Airbyte Source written in Python',
    prompts: [{type: 'input', name: 'name', message: 'Source name, without the "source-" prefix e.g: "google-analytics"'}],
    actions: [
      {
        abortOnFail: true,
        type:'addMany',
        destination: pythonSourceOutputRoot,
        base: pythonSourceInputRoot,
        templateFiles: `${pythonSourceInputRoot}/**/**`,
        globOptions: {ignore:'.secrets'}
      },
      // plop doesn't add dotfiles by default so we manually add them
      {
        type:'add',
        abortOnFail: true,
        templateFile: `${pythonSourceInputRoot}/.secrets/config.json.hbs`,
        path: `${pythonSourceOutputRoot}/secrets/config.json`
      },
      {
        type:'add',
        abortOnFail: true,
        templateFile: `${pythonSourceInputRoot}/.gitignore.hbs`,
        path: `${pythonSourceOutputRoot}/.gitignore`
      },
      {
        type:'add',
        abortOnFail: true,
        templateFile: `${pythonSourceInputRoot}/.dockerignore.hbs`,
        path: `${pythonSourceOutputRoot}/.dockerignore`
      },
      // Custom action: symlink the shared base packages into the new
      // connector so it imports them from the monorepo.
      function(answers, config, plop){
        const renderedOutputDir = plop.renderString(pythonSourceOutputRoot, answers);
        fs.symlinkSync(`${basesDir}/base-python/base_python`, `${renderedOutputDir}/base_python`);
        fs.symlinkSync(`${basesDir}/airbyte-protocol/airbyte_protocol`, `${renderedOutputDir}/airbyte_protocol`);
      },
      'Your new Python source connector has been created. Follow the instructions and TODOs in the newly created package for next steps. Happy coding! 🐍🐍',]
  });

  plop.setGenerator('Singer-based Python Source', {
    description: 'Generate an Airbyte Source written on top of a Singer Tap.',
    prompts: [{type: 'input', name: 'name', message: 'Source name, without the "source-" prefix e.g: "google-analytics"'}],
    actions: [
      {
        abortOnFail: true,
        type:'addMany',
        destination: singerSourceOutputRoot,
        base: singerSourceInputRoot,
        templateFiles: `${singerSourceInputRoot}/**/**`,
        globOptions: {ignore:'.secrets'}
      },
      // Dotfiles added explicitly, as in the Python Source generator above.
      {
        type:'add',
        abortOnFail: true,
        templateFile: `${singerSourceInputRoot}/.secrets/config.json.hbs`,
        path: `${singerSourceOutputRoot}/secrets/config.json`
      },
      {
        type:'add',
        abortOnFail: true,
        templateFile: `${singerSourceInputRoot}/.gitignore.hbs`,
        path: `${singerSourceOutputRoot}/.gitignore`
      },
      {
        type:'add',
        abortOnFail: true,
        templateFile: `${singerSourceInputRoot}/.dockerignore.hbs`,
        path: `${singerSourceOutputRoot}/.dockerignore`
      },
      function(answers, config, plop){
        const renderedOutputDir = plop.renderString(singerSourceOutputRoot, answers);
        fs.symlinkSync(`${basesDir}/base-python/base_python`, `${renderedOutputDir}/base_python`);
        fs.symlinkSync(`${basesDir}/airbyte-protocol/airbyte_protocol`, `${renderedOutputDir}/airbyte_protocol`);
        fs.symlinkSync(`${basesDir}/base-singer/base_singer`, `${renderedOutputDir}/base_singer`);
      },
      'Your new Singer-based source connector has been created. Follow the instructions and TODOs in the newly created package for next steps. Happy coding! 🐍🐍',
    ]
  });
};
|
from flask import Flask, Blueprint, request
from flask_restful import Resource, Api
from prometheus_flask_exporter import PrometheusMetrics
# Application wiring: a Flask app with a versioned REST blueprint and
# Prometheus metrics instrumentation attached to the app.
app = Flask(__name__)
blueprint = Blueprint('api_v1', __name__, url_prefix='/api/v1')
restful_api = Api(blueprint)
metrics = PrometheusMetrics(app)
class Test(Resource):
    """Demo resource whose request latencies are recorded per status code."""

    # Class attribute kept for parity with the original resource definition.
    status = 200

    @staticmethod
    @metrics.summary('test_by_status', 'Test Request latencies by status', labels={
        'code': lambda r: r.status_code
    })
    def get():
        # Guard clause: a ?fail query parameter forces a 400 response,
        # which lets the summary metric be observed under both labels.
        if 'fail' in request.args:
            return 'Not OK', 400
        return 'OK'
# Mount the resource under /api/v1/test and activate the blueprint.
restful_api.add_resource(Test, '/test', endpoint='test')
app.register_blueprint(blueprint)

if __name__ == '__main__':
    # Dev server, reachable from any interface on port 4000.
    app.run('0.0.0.0', 4000)
|
/** @jsx h */
import { List } from 'immutable'
// Fixture input: CNXML where a <para> interleaves text with block-level
// children (list, figure) — the normalizer is expected to split it.
export const input = cnxml`
<para id="p1">Some text<list id="l1">
<item>List item</item>
</list>More text<figure id="f1">
<media alt="This should not be inline">
<image src="f1.png" />
</media>
</figure>Even more text</para>
`
// Expected editor value: the single <para> is normalized into separate
// paragraphs around the extracted list and figure blocks.
export const outputContent = <value>
  <document>
    <p key="p1">Some text</p>
    <ul key="l1" class={List()}>
      <li>
        <p>List item</p>
      </li>
    </ul>
    <p>More text</p>
    <figure key="f1" class={List()}>
      <media alt="This should not be inline">
        <img src="f1.png"><text/></img>
        <mediaalt>This should not be inline</mediaalt>
      </media>
    </figure>
    <p>Even more text</p>
  </document>
</value>
|
"""
Django settings for apartment_notifier project.
Generated by 'django-admin startproject' using Django 3.1.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from pathlib import Path
import django_heroku
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment so it is never committed; raises KeyError at
# startup if unset, which fails fast rather than running with a known key.
SECRET_KEY = os.environ['SECRET_KEY']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# Left empty here; django_heroku.settings() (bottom of file) populates it.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# NOTE(review): the module docstring names the project 'apartment_notifier'
# but the URL/WSGI modules point at 'find_apartments' - confirm the package
# name matches the deployed layout.
ROOT_URLCONF = 'find_apartments.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'find_apartments.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        # Use pathlib consistently with BASE_DIR and STATIC_ROOT below
        # (Django 3.1+ accepts a Path for NAME).
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'static'
# Activate Django-Heroku: mutates the settings above (DATABASES from
# DATABASE_URL, ALLOWED_HOSTS, static files) for Heroku deployment.
django_heroku.settings(locals())
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var React = tslib_1.__importStar(require("react"));
var styled_icon_1 = require("@styled-icons/styled-icon");
exports.PhonelinkOff = React.forwardRef(function (props, ref) {
var attrs = {
"fill": "currentColor",
"xmlns": "http://www.w3.org/2000/svg",
};
return (React.createElement(styled_icon_1.StyledIconBase, tslib_1.__assign({ iconAttrs: attrs, iconVerticalAlign: "middle", iconViewBox: "0 0 24 24" }, props, { ref: ref }),
React.createElement("path", { fill: "none", d: "M0 0h24v24H0zm0 0h24v24H0z", key: "k0" }),
React.createElement("path", { d: "M22 6V4H6.82l2 2H22zM1.92 1.65L.65 2.92l1.82 1.82C2.18 5.08 2 5.52 2 6v11H0v3h17.73l2.35 2.35 1.27-1.27L3.89 3.62 1.92 1.65zM4 6.27L14.73 17H4V6.27zM23 8h-6c-.55 0-1 .45-1 1v4.18l2 2V10h4v7h-2.18l3 3H23c.55 0 1-.45 1-1V9c0-.55-.45-1-1-1z", key: "k1" })));
});
exports.PhonelinkOff.displayName = 'PhonelinkOff';
exports.PhonelinkOffDimensions = { height: 24, width: 24 };
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START get_service_account_tokens]
from firebase_admin import credentials

# Build a credential from a service-account key file and mint an OAuth2
# access token for calling Google APIs on behalf of the service account.
cred = credentials.Certificate('path/to/serviceAccountKey.json')
access_token_info = cred.get_access_token()
access_token = access_token_info.access_token
expiration_time = access_token_info.expiry
# Attach access_token to HTTPS request in the "Authorization: Bearer" header
# After expiration_time, you must generate a new access token
# [END get_service_account_tokens]
# Fix: the original used a Python 2 print statement, which is a syntax error
# under Python 3; use the print() function instead.
print('The access token {} expires at {}'.format(access_token, expiration_time))
|
// This is a generated source file for Chilkat version 9.5.0.76
//
// C-language wrapper API for the Chilkat CkByteData byte buffer: lifecycle,
// append operations, search/comparison, typed reads at byte offsets, string
// encoding, and file I/O. Functions with a 'W' suffix are wide-character
// (wchar_t) variants of their narrow counterparts.
#ifndef _C_CkByteData_H
#define _C_CkByteData_H
#include "chilkatDefs.h"
#include "Chilkat_C.h"
/* --- Lifecycle --- */
CK_VISIBLE_PUBLIC HCkByteData CkByteData_Create(void);
CK_VISIBLE_PUBLIC void CkByteData_Dispose(HCkByteData handle);
/* --- SecureClear property: when TRUE, memory is zeroed before release --- */
CK_VISIBLE_PUBLIC BOOL CkByteData_getSecureClear(HCkByteData cHandle);
CK_VISIBLE_PUBLIC void CkByteData_putSecureClear(HCkByteData cHandle, BOOL newVal);
/* --- Append operations --- */
CK_VISIBLE_PUBLIC void CkByteData_append(HCkByteData cHandle, HCkByteData db);
CK_VISIBLE_PUBLIC void CkByteData_append2(HCkByteData cHandle, const unsigned char *pByteData, unsigned long szByteData);
CK_VISIBLE_PUBLIC void CkByteData_appendChar(HCkByteData cHandle, char ch);
CK_VISIBLE_PUBLIC void CkByteData_appendCharN(HCkByteData cHandle, char ch, int numTimes);
CK_VISIBLE_PUBLIC void CkByteData_appendEncoded(HCkByteData cHandle, const char *str, const char *encoding);
CK_VISIBLE_PUBLIC void CkByteData_appendEncodedW(HCkByteData cHandle, const wchar_t * str, const wchar_t * encoding);
CK_VISIBLE_PUBLIC BOOL CkByteData_appendFile(HCkByteData cHandle, const char *path);
CK_VISIBLE_PUBLIC BOOL CkByteData_appendFileW(HCkByteData cHandle, const wchar_t * path);
CK_VISIBLE_PUBLIC void CkByteData_appendInt(HCkByteData cHandle, int intValue, BOOL littleEndian);
CK_VISIBLE_PUBLIC void CkByteData_appendRandom(HCkByteData cHandle, int numBytes);
CK_VISIBLE_PUBLIC void CkByteData_appendRange(HCkByteData cHandle, HCkByteData byteData, unsigned long index, unsigned long numBytes);
CK_VISIBLE_PUBLIC void CkByteData_appendShort(HCkByteData cHandle, short shortValue, BOOL littleEndian);
CK_VISIBLE_PUBLIC void CkByteData_appendStr(HCkByteData cHandle, const char *str);
CK_VISIBLE_PUBLIC void CkByteData_appendStrW(HCkByteData cHandle, const wchar_t * str, const wchar_t * charset);
/* --- Comparison and search --- */
CK_VISIBLE_PUBLIC BOOL CkByteData_beginsWith(HCkByteData cHandle, HCkByteData byteDataObj);
CK_VISIBLE_PUBLIC BOOL CkByteData_beginsWith2(HCkByteData cHandle, const unsigned char *pByteData, unsigned long szByteData);
/* borrowData: references the caller's buffer without copying */
CK_VISIBLE_PUBLIC void CkByteData_borrowData(HCkByteData cHandle, const unsigned char *pByteData, unsigned long szByteData);
CK_VISIBLE_PUBLIC void CkByteData_byteSwap4321(HCkByteData cHandle);
CK_VISIBLE_PUBLIC void CkByteData_clear(HCkByteData cHandle);
/* --- String encoding (base64, hex, etc. per the encoding argument) --- */
CK_VISIBLE_PUBLIC void CkByteData_encode(HCkByteData cHandle, const char *encoding, HCkString str);
CK_VISIBLE_PUBLIC void CkByteData_encodeW(HCkByteData cHandle, const wchar_t * encoding, HCkString str);
CK_VISIBLE_PUBLIC BOOL CkByteData_ensureBuffer(HCkByteData cHandle, unsigned long expectedNumBytes);
CK_VISIBLE_PUBLIC BOOL CkByteData_equals(HCkByteData cHandle, HCkByteData compareBytes);
CK_VISIBLE_PUBLIC BOOL CkByteData_equals2(HCkByteData cHandle, const unsigned char *pCompareBytes, unsigned long numBytes);
CK_VISIBLE_PUBLIC int CkByteData_findBytes(HCkByteData cHandle, HCkByteData byteDataObj);
CK_VISIBLE_PUBLIC int CkByteData_findBytes2(HCkByteData cHandle, const unsigned char *findBytes, unsigned long findBytesLen);
/* --- Accessors: raw pointers and typed reads at byte offsets --- */
CK_VISIBLE_PUBLIC unsigned char CkByteData_getByte(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC const unsigned char *CkByteData_getBytes(HCkByteData cHandle);
CK_VISIBLE_PUBLIC char CkByteData_getChar(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC const unsigned char *CkByteData_getData(HCkByteData cHandle);
CK_VISIBLE_PUBLIC const unsigned char *CkByteData_getDataAt(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC const wchar_t * CkByteData_getEncodedW(HCkByteData cHandle, const wchar_t * encoding);
CK_VISIBLE_PUBLIC int CkByteData_getInt(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC const unsigned char *CkByteData_getRange(HCkByteData cHandle, unsigned long byteIndex, unsigned long numBytes);
CK_VISIBLE_PUBLIC short CkByteData_getShort(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC unsigned long CkByteData_getSize(HCkByteData cHandle);
CK_VISIBLE_PUBLIC unsigned long CkByteData_getUInt(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC unsigned short CkByteData_getUShort(HCkByteData cHandle, unsigned long byteIndex);
CK_VISIBLE_PUBLIC BOOL CkByteData_is7bit(HCkByteData cHandle);
/* --- File I/O --- */
CK_VISIBLE_PUBLIC BOOL CkByteData_loadFile(HCkByteData cHandle, const char *path);
CK_VISIBLE_PUBLIC BOOL CkByteData_loadFileW(HCkByteData cHandle, const wchar_t * path);
/* --- Mutation: padding, removal, replacement --- */
CK_VISIBLE_PUBLIC void CkByteData_pad(HCkByteData cHandle, int blockSize, int paddingScheme);
CK_VISIBLE_PUBLIC BOOL CkByteData_preAllocate(HCkByteData cHandle, unsigned long expectedNumBytes);
CK_VISIBLE_PUBLIC void CkByteData_removeChunk(HCkByteData cHandle, unsigned long startIndex, unsigned long numBytes);
CK_VISIBLE_PUBLIC const unsigned char *CkByteData_removeData(HCkByteData cHandle);
CK_VISIBLE_PUBLIC void CkByteData_replaceChar(HCkByteData cHandle, unsigned char existingByteValue, unsigned char replacementByteValue);
CK_VISIBLE_PUBLIC BOOL CkByteData_saveFile(HCkByteData cHandle, const char *path);
CK_VISIBLE_PUBLIC BOOL CkByteData_saveFileW(HCkByteData cHandle, const wchar_t * path);
CK_VISIBLE_PUBLIC void CkByteData_shorten(HCkByteData cHandle, unsigned long numBytes);
CK_VISIBLE_PUBLIC const wchar_t * CkByteData_to_ws(HCkByteData cHandle, const char *charset);
CK_VISIBLE_PUBLIC void CkByteData_unpad(HCkByteData cHandle, int blockSize, int paddingScheme);
#endif
|
import React, {Component} from 'react';
// import {connect} from 'react-redux';
export default class Output extends React.Component {
// renderMediaID(mediaData){
// return(
// // <div>{mediaData.media_id}</div>
// <div>Output</div>
// );
// }
// <div className='url-output'> {this.state.mediaid.map(this.renderMediaID)} </div>
render(){
return(
<div className='url-output'> Output </div>
);
}
}
//
// function mapStateToProps(state){
// return { mediaid: state.mediaid };
// }
//
// export default connect(mapStateToProps,Output);
|
/*
* Copyright (C) 2002-2006 Manuel Novoa III <mjn3@uclibc.org>
*
* Licensed under the LGPL v2.1, see the file COPYING.LIB in this tarball.
*/
/* Build the extended-locale variant: these macros select the wctype_l entry
 * point when the shared implementation in _wctype.c is compiled below.
 * (Presumably the standard uClibc one-file-per-symbol pattern - the shared
 * source compiles whichever function the L_* macro names.) */
#define L_wctype_l
#define __UCLIBC_DO_XLOCALE
#include "_wctype.c"
|
/* eslint-disable import/no-mutable-exports,max-len */
import 'abortcontroller-polyfill/dist/abortcontroller-polyfill-only'
import { getWeb3 } from 'helpers/web3'
import * as bitcoin from 'bitcoinjs-lib'
import * as ghost from 'bitcoinjs-lib'
import abi from 'human-standard-token-abi'
import Channel from 'ipfs-pubsub-room'
import IPFS from 'ipfs'
import config, { initExternalConfig } from 'helpers/externalConfig'
import helpers, { constants as privateKeys, utils } from 'helpers'
import actions from 'redux/actions'
import SwapApp, { constants } from 'swap.app'
import SwapAuth from 'swap.auth'
import SwapRoom from 'swap.room'
import SwapOrders from 'swap.orders'
import { ETH2BTC, BTC2ETH, ETHTOKEN2BTC, BTC2ETHTOKEN, GHOST2ETH, ETH2GHOST, ETHTOKEN2GHOST, GHOST2ETHTOKEN } from 'swap.flows'
import { GHOST2BTC, BTC2GHOST } from 'swap.flows'
import { EthSwap, EthTokenSwap, BtcSwap, GhostSwap } from 'swap.swaps'
import { pipeline } from 'stream'
import metamask from 'helpers/metamask'
// One-time bootstrap: apply external config overrides, create the IPFS repo,
// and install the process exit listener before the swap app is constructed.
initExternalConfig()
const repo = utils.createRepo()
utils.exitListener()
const createSwapApp = async () => {
const web3 = (metamask.isEnabled() && metamask.isConnected())
? await metamask.getWeb3()
: await getWeb3()
SwapApp.setup({
network: process.env.MAINNET ? 'mainnet' : 'testnet',
env: {
web3,
getWeb3,
bitcoin,
ghost,
coininfo: {
ghost: {
main: helpers.ghost.networks.mainnet,
test: helpers.ghost.networks.testnet,
},
},
Ipfs: IPFS,
IpfsRoom: Channel,
storage: window.localStorage,
sessionStorage: window.sessionStorage,
metamask: (metamask.isEnabled() && metamask.isConnected())
? metamask
: false,
},
// White list (Список адресов btc довереных продавцов)
// whitelistBtc: [],
services: [
new SwapAuth({
// TODO need init swapApp only after private keys created!!!!!!!!!!!!!!!!!!!
eth: localStorage.getItem(privateKeys.privateKeyNames.eth),
btc: localStorage.getItem(privateKeys.privateKeyNames.btc),
ghost: localStorage.getItem(privateKeys.privateKeyNames.ghost),
}),
new SwapRoom({
repo,
config: {
Addresses: {
Swarm: [
config.ipfs.swarm,
],
},
},
}),
new SwapOrders(),
],
swaps: [
new EthSwap({
address: config.swapContract.eth,
/* eslint-disable */
abi: [{ "constant": false, "inputs": [{ "name": "_secret", "type": "bytes32" }, { "name": "_ownerAddress", "type": "address" }], "name": "withdraw", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": true, "inputs": [{ "name": "_participantAddress", "type": "address" }], "name": "getSecret", "outputs": [{ "name": "", "type": "bytes32" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [{ "name": "", "type": "address" }, { "name": "", "type": "address" }], "name": "participantSigns", "outputs": [{ "name": "", "type": "uint256" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secret", "type": "bytes32" }, { "name": "participantAddress", "type": "address" }], "name": "withdrawNoMoney", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": true, "inputs": [], "name": "owner", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secretHash", "type": "bytes20" }, { "name": "_participantAddress", "type": "address" }, { "name": "_targetWallet", "type": "address" }], "name": "createSwapTarget", "outputs": [], "payable": true, "stateMutability": "payable", "type": "function" }, { "constant": true, "inputs": [{ "name": "", "type": "address" }, { "name": "", "type": "address" }], "name": "swaps", "outputs": [{ "name": "targetWallet", "type": "address" }, { "name": "secret", "type": "bytes32" }, { "name": "secretHash", "type": "bytes20" }, { "name": "createdAt", "type": "uint256" }, { "name": "balance", "type": "uint256" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_ownerAddress", "type": "address" }, { "name": "_participantAddress", "type": "address" }], "name": "closeSwapByAdminAfterOneYear", 
"outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secretHash", "type": "bytes20" }, { "name": "_participantAddress", "type": "address" }], "name": "createSwap", "outputs": [], "payable": true, "stateMutability": "payable", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secret", "type": "bytes32" }, { "name": "_ownerAddress", "type": "address" }, { "name": "participantAddress", "type": "address" }], "name": "withdrawOther", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": true, "inputs": [], "name": "ratingContractAddress", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [{ "name": "_ownerAddress", "type": "address" }], "name": "getTargetWallet", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [], "name": "admin", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [{ "name": "_ownerAddress", "type": "address" }], "name": "getBalance", "outputs": [{ "name": "", "type": "uint256" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_participantAddress", "type": "address" }], "name": "refund", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "inputs": [], "payable": false, "stateMutability": "nonpayable", "type": "constructor" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "_buyer", "type": "address" }, { "indexed": false, "name": "_seller", "type": "address" }, { "indexed": false, "name": "_value", "type": "uint256" }, { "indexed": false, "name": "_secretHash", "type": "bytes20" }, { "indexed": false, "name": 
"createdAt", "type": "uint256" }], "name": "CreateSwap", "type": "event" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "_buyer", "type": "address" }, { "indexed": false, "name": "_seller", "type": "address" }, { "indexed": false, "name": "_secretHash", "type": "bytes20" }, { "indexed": false, "name": "withdrawnAt", "type": "uint256" }], "name": "Withdraw", "type": "event" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "_buyer", "type": "address" }, { "indexed": false, "name": "_seller", "type": "address" }], "name": "Close", "type": "event" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "_buyer", "type": "address" }, { "indexed": false, "name": "_seller", "type": "address" }, { "indexed": false, "name": "_secretHash", "type": "bytes20" }], "name": "Refund", "type": "event" }],
/* eslint-enable */
fetchBalance: (address) => actions.eth.fetchBalance(address),
estimateGasPrice: ({ speed } = {}) => helpers.eth.estimateGasPrice({ speed }),
}),
new BtcSwap({
fetchBalance: (address) => actions.btc.fetchBalance(address),
fetchUnspents: (scriptAddress) => actions.btc.fetchUnspents(scriptAddress),
broadcastTx: (txRaw) => actions.btc.broadcastTx(txRaw),
fetchTxInfo: (txid) => actions.btc.fetchTxInfo(txid),
checkWithdraw: (scriptAddress) => actions.btc.checkWithdraw(scriptAddress),
estimateFeeValue: ({ inSatoshis, speed, address, txSize } = {}) => helpers.btc.estimateFeeValue({ inSatoshis, speed, address, txSize }),
}),
new GhostSwap({
fetchBalance: (address) => actions.ghost.fetchBalance(address),
fetchUnspents: (scriptAddress) => actions.ghost.fetchUnspents(scriptAddress),
broadcastTx: (txRaw) => actions.ghost.broadcastTx(txRaw),
fetchTxInfo: (txid) => actions.ghost.fetchTxInfo(txid),
checkWithdraw: (scriptAddress) => actions.ghost.checkWithdraw(scriptAddress),
estimateFeeValue: ({ inSatoshis, speed, address, txSize } = {}) => helpers.ghost.estimateFeeValue({ inSatoshis, speed, address, txSize }),
}),
...(Object.keys(config.erc20)
.map(key =>
new EthTokenSwap({
name: key,
tokenAbi: abi,
address: config.swapContract.erc20,
decimals: config.erc20[key].decimals,
tokenAddress: config.erc20[key].address,
fetchBalance: (address) => actions.token.fetchBalance(address, config.erc20[key].address, config.erc20[key].decimals),
estimateGasPrice: ({ speed } = {}) => helpers.ethToken.estimateGasPrice({ speed }),
/* eslint-disable */
abi: [{ "constant": false, "inputs": [{ "name": "_secret", "type": "bytes32" }, { "name": "_ownerAddress", "type": "address" }], "name": "withdraw", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": true, "inputs": [{ "name": "_participantAddress", "type": "address" }], "name": "getSecret", "outputs": [{ "name": "", "type": "bytes32" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secretHash", "type": "bytes20" }, { "name": "_participantAddress", "type": "address" }, { "name": "_targetWallet", "type": "address" }, { "name": "_value", "type": "uint256" }, { "name": "_token", "type": "address" }], "name": "createSwapTarget", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secret", "type": "bytes32" }, { "name": "participantAddress", "type": "address" }], "name": "withdrawNoMoney", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": true, "inputs": [], "name": "owner", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [{ "name": "", "type": "address" }, { "name": "", "type": "address" }], "name": "swaps", "outputs": [{ "name": "token", "type": "address" }, { "name": "targetWallet", "type": "address" }, { "name": "secret", "type": "bytes32" }, { "name": "secretHash", "type": "bytes20" }, { "name": "createdAt", "type": "uint256" }, { "name": "balance", "type": "uint256" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_ownerAddress", "type": "address" }, { "name": "_participantAddress", "type": "address" }], "name": "closeSwapByAdminAfterOneYear", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": false, "inputs": 
[{ "name": "_secretHash", "type": "bytes20" }, { "name": "_participantAddress", "type": "address" }, { "name": "_value", "type": "uint256" }, { "name": "_token", "type": "address" }], "name": "createSwap", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": false, "inputs": [{ "name": "_secret", "type": "bytes32" }, { "name": "_ownerAddress", "type": "address" }, { "name": "participantAddress", "type": "address" }], "name": "withdrawOther", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "constant": true, "inputs": [{ "name": "tokenOwnerAddress", "type": "address" }], "name": "getTargetWallet", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [], "name": "admin", "outputs": [{ "name": "", "type": "address" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": true, "inputs": [{ "name": "_ownerAddress", "type": "address" }], "name": "getBalance", "outputs": [{ "name": "", "type": "uint256" }], "payable": false, "stateMutability": "view", "type": "function" }, { "constant": false, "inputs": [{ "name": "_participantAddress", "type": "address" }], "name": "refund", "outputs": [], "payable": false, "stateMutability": "nonpayable", "type": "function" }, { "inputs": [], "payable": false, "stateMutability": "nonpayable", "type": "constructor" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "token", "type": "address" }, { "indexed": false, "name": "_buyer", "type": "address" }, { "indexed": false, "name": "_seller", "type": "address" }, { "indexed": false, "name": "_value", "type": "uint256" }, { "indexed": false, "name": "_secretHash", "type": "bytes20" }, { "indexed": false, "name": "createdAt", "type": "uint256" }], "name": "CreateSwap", "type": "event" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "_buyer", "type": 
"address" }, { "indexed": false, "name": "_seller", "type": "address" }, { "indexed": false, "name": "_secretHash", "type": "bytes20" }, { "indexed": false, "name": "withdrawnAt", "type": "uint256" }], "name": "Withdraw", "type": "event" }, { "anonymous": false, "inputs": [{ "indexed": false, "name": "_buyer", "type": "address" }, { "indexed": false, "name": "_seller", "type": "address" }, { "indexed": false, "name": "_secretHash", "type": "bytes20" }], "name": "Refund", "type": "event" }],
/* eslint-enable */
})
)),
],
flows: [
ETH2BTC,
BTC2ETH,
// GHOST2BTC,
// BTC2GHOST,
GHOST2ETH,
ETH2GHOST,
...(Object.keys(config.erc20))
.map(key => ETHTOKEN2BTC(key)),
...(Object.keys(config.erc20))
.map(key => BTC2ETHTOKEN(key)),
...(Object.keys(config.erc20))
.map(key => ETHTOKEN2GHOST(key)),
...(Object.keys(config.erc20))
.map(key => GHOST2ETHTOKEN(key)),
// ...(Object.keys(config.erc20))
// .map(key => ETHTOKEN2USDT(key)),
//
// ...(Object.keys(config.erc20))
// .map(key => USDT2ETHTOKEN(key)),
],
})
// eslint-disable-next-line
// process.env.MAINNET ? SwapApp.shared()._addSwap(
// new UsdtSwap({
// assetId: 31, // USDT
// fetchBalance: (address) => actions.usdt.fetchBalance(address, 31).then(res => res.balance),
// fetchUnspents: (scriptAddress) => actions.btc.fetchUnspents(scriptAddress),
// broadcastTx: (txRaw) => actions.btc.broadcastTx(txRaw),
// fetchTx: (hash) => actions.btc.fetchTx(hash),
// }),
// ) : null
window.SwapApp = SwapApp.shared()
}
// Public module API: callers invoke createSwapApp() once at startup.
export {
  createSwapApp,
}
|
'use strict';
// MODULES //
var main = require( './../lib/index.js' );
var chai = require( 'chai' );
var expect = chai.expect;
var path = require( 'path' );
// FIXTURES //
// Paths to a plaintext model, a binary model, and a file with an unknown
// extension (used to trigger the mime-type error path).
var file = path.normalize( __dirname + '/fixtures/test.txt' );
var fileBin = path.normalize( __dirname + '/fixtures/test.bin' );
var fileOther = path.normalize( __dirname + '/fixtures/test.x' );
// TESTS //
// Tests for loadModel and the model object it yields.
// Fix: several asynchronous specs asserted inside the loadModel callback
// without taking mocha's `done` callback, so the spec completed before the
// assertions ran and failures were silently ignored; each of those specs now
// signals completion explicitly. Also fixed the "plaintext with is_binary
// option" spec, which never actually passed the option.
describe( 'loadModel', function tests() {

	it( 'is a callable function', function test() {
		expect( main.loadModel ).to.be.a( 'function' );
	});

	it( 'should throw an error if not provided a string for filename', function test() {
		var values = [
			5,
			null,
			undefined,
			NaN,
			true,
			[],
			{},
			function(){}
		];
		for ( var i = 0; i < values.length; i++ ) {
			expect( badValue( values[i] ) ).to.throw( Error );
		}
		function badValue( value ) {
			return function() {
				main.loadModel( { file: value } );
			};
		}
	});

	it( 'successfully loads a binary model file', function test( done ) {
		main.loadModel( { file: fileBin }, function( err, model ) {
			expect( err ).to.be.null;
			expect( model.words ).to.be.equal( 34 );
			expect( model.size ).to.be.equal( 50 );
			done();
		});
	});

	it( 'successfully loads a binary model file with is_binary option', function test( done ) {
		main.loadModel( { file: fileBin, is_binary: true }, function( err, model ) {
			expect( err ).to.be.null;
			expect( model.words ).to.be.equal( 34 );
			expect( model.size ).to.be.equal( 50 );
			done();
		});
	});

	it( 'should throw an error if not provided is_binary and mime_type is not \'application/octet-stream\'', function test() {
		var loadBadFile = () => { main.loadModel( { file: fileOther } ); };
		expect( loadBadFile ).to.throw( Error );
	});

	it( 'successfully loads a plaintext model file', function test( done ) {
		main.loadModel( { file: file }, function( err ) {
			expect( err ).to.be.null;
			done();
		});
	});

	it( 'successfully loads a plaintext model file with is_binary option', function test( done ) {
		// Fix: actually exercise the option the spec name refers to.
		main.loadModel( { file: file, is_binary: false }, function( err ) {
			expect( err ).to.be.null;
			done();
		});
	});

	describe( '.getVector()', function tests() {
		it( 'retrieves the vector for a given word', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var wordVec = model.getVector( 'church' );
				expect( wordVec ).to.be.a( 'object' );
				expect( wordVec ).to.have.ownProperty( 'word' );
				expect( wordVec ).to.have.ownProperty( 'values' );
				done();
			});
		});
	});

	describe( '.getVectors()', function tests() {
		it( 'retrieves the vectors for the given word list', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var wordVecArr = model.getVectors( ['and', 'or'] );
				expect( wordVecArr ).to.be.a( 'array' );
				expect( wordVecArr ).to.have.property( 'length' );
				expect( wordVecArr[0] ).to.have.ownProperty( 'word' );
				expect( wordVecArr[0] ).to.have.ownProperty( 'values' );
				done();
			});
		});
	});

	describe( '.getNearestWord()', function tests() {
		it( 'retrieves the nearest word for the input word vector', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var res = model.getNearestWord( model.getVector( 'and' ) );
				expect( res ).to.have.ownProperty( 'word' );
				expect( res ).to.have.ownProperty( 'dist' );
				done();
			});
		});
	});

	describe( '.getNearestWords()', function tests() {
		it( 'retrieves the nearest words for the input word vector', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var res = model.getNearestWords( model.getVector( 'and' ), 3 );
				expect( res ).to.be.a( 'array' );
				expect( res ).to.have.length( 3 );
				expect( res[0] ).to.have.ownProperty( 'word' );
				expect( res[0] ).to.have.ownProperty( 'dist' );
				done();
			});
		});
	});

	describe( '.mostSimilar()', function tests() {
		it( 'retrieves the words most similar to the input word', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var res = model.mostSimilar( 'population', 20 );
				expect( res ).to.be.a( 'array' );
				expect( res ).to.have.length( 20 );
				expect( res[0] ).to.have.ownProperty( 'word' );
				expect( res[0] ).to.have.ownProperty( 'dist' );
				done();
			});
		});
	});

	describe( '.similarity()', function tests() {
		it( 'calculates the similarity between two words', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var res = model.similarity( 'political', 'theory' );
				expect( res ).to.be.a( 'number' );
				done();
			});
		});
		it( 'returns 1.0 for two identical words', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var res = model.similarity( 'political', 'political' );
				expect( res ).to.be.a( 'number' );
				expect( res ).to.equal( 1.0 );
				done();
			});
		});
	});

	describe( '.analogy()', function tests() {
		it( 'given pair, finds the term which stands in analogous relationship to supplied word', function test( done ) {
			main.loadModel( { file: file }, function( err, model ) {
				var res = model.analogy( 'any', [ 'and', 'or' ], 10 );
				expect( res ).to.be.a( 'array' );
				expect( res ).to.have.length( 10 );
				done();
			});
		});
	});
});
// Integration tests for WordVector arithmetic: vectors retrieved from a
// loaded model support element-wise add and subtract, each producing a new
// WordVector instance.
describe( 'WordVector', function tests() {
it( 'can be added to each other', function test( done ) {
main.loadModel( {file: file}, function( err, model ) {
var wordVec1 = model.getVector( 'and' );
var wordVec2 = model.getVector( 'any' );
var result = wordVec1.add( wordVec2 );
expect( result ).to.be.a.instanceOf( main.WordVector );
done();
});
});
it( 'can be subtracted from each other', function test( done ) {
main.loadModel( {file: file}, function( err, model ) {
var wordVec1 = model.getVector( 'and' );
var wordVec2 = model.getVector( 'any' );
var result = wordVec1.subtract( wordVec2 );
expect( result ).to.be.a.instanceOf( main.WordVector );
done();
});
});
});
|
/* global describe expect it */
import { deleteFile } from '../../../src/utils/storage'
// Minimal in-memory stub of the firebase SDK surface used by deleteFile.
// Fix: the `val` arrows used block bodies (`() => { some: 'obj' }`), in which
// `some:` parses as a statement label and the function returns undefined;
// the object literals are now wrapped in parentheses so val() actually
// returns { some: 'obj' }.
const fakeFirebase = {
  _: {
    authUid: '123',
    config: {
      userProfile: 'users',
      disableRedirectHandling: true,
    },
  },
  storage: () => ({
    ref: () => ({
      delete: () => Promise.resolve({ val: () => ({ some: 'obj' }) }),
    })
  }),
  database: () => ({
    ref: () => ({
      remove: () => Promise.resolve({ }),
      child: () => ({
        on: () => Promise.resolve({ val: () => ({ some: 'obj' }) }),
        off: () => Promise.resolve({ val: () => ({ some: 'obj' }) }),
        once: () => Promise.resolve({ val: () => ({ some: 'obj' }) })
      })
    })
  }),
}
describe('Utils: Storage', () => {
  describe('deleteFile', () => {
    // Fix: return the chai-as-promised assertion so the test runner waits on
    // it; previously the promise was dropped and a rejection or failed
    // expectation could never fail the spec.
    it('returns dbPath', () =>
      expect(deleteFile(fakeFirebase, { path: 'some', dbPath: 'some' })).to.eventually.have.keys('dbPath')
    )
  })
})
|
import React, { Component } from 'react';
import * as PropTypes from 'prop-types';
import { compose, pathOr } from 'ramda';
import { createFragmentContainer } from 'react-relay';
import graphql from 'babel-plugin-relay/macro';
import { withStyles } from '@material-ui/core/styles';
import Typography from '@material-ui/core/Typography';
import ItemMarking from '../../../../components/ItemMarking';
import IndicatorPopover from './IndicatorPopover';
import { truncate } from '../../../../utils/String';
// JSS styles: title floats left, the actions popover hugs it with a negative
// top margin, and the marking chips float right with horizontal overflow cut.
const styles = () => ({
title: {
float: 'left',
textTransform: 'uppercase',
},
popover: {
float: 'left',
marginTop: '-13px',
},
marking: {
float: 'right',
overflowX: 'hidden',
},
});
// Presentational header for an Indicator entity: truncated name, an actions
// popover, and - unless variant === 'noMarking' - one chip per marking
// definition attached to the indicator.
class IndicatorHeaderComponent extends Component {
render() {
const { classes, variant, indicator } = this.props;
// pathOr guards against a missing objectMarking edge list.
return (
<div>
<Typography
variant="h1"
gutterBottom={true}
classes={{ root: classes.title }}
>
{truncate(indicator.name, 50)}
</Typography>
<div className={classes.popover}>
<IndicatorPopover indicatorId={indicator.id} />
</div>
{variant !== 'noMarking' ? (
<div className={classes.marking}>
{pathOr([], ['objectMarking', 'edges'], indicator).map(
(markingDefinition) => (
<ItemMarking
key={markingDefinition.node.id}
label={markingDefinition.node.definition}
color={markingDefinition.node.x_opencti_color}
/>
),
)}
</div>
) : (
''
)}
<div className="clearfix" />
</div>
);
}
}
// Loose prop contracts; the Relay fragment below is the real data shape.
IndicatorHeaderComponent.propTypes = {
indicator: PropTypes.object,
variant: PropTypes.string,
classes: PropTypes.object,
};
// Relay fragment container declaring exactly the Indicator fields the
// header renders, including marking definitions for the chips.
const IndicatorHeader = createFragmentContainer(IndicatorHeaderComponent, {
indicator: graphql`
fragment IndicatorHeader_indicator on Indicator {
id
entity_type
name
objectMarking {
edges {
node {
id
definition
x_opencti_color
}
}
}
}
`,
});
export default compose(withStyles(styles))(IndicatorHeader);
|
from platform import system as system_os
import argparse
from os import system, environ
from googleapiclient.discovery import build
class Youtube:
    """Search YouTube via the Data API v3 and print basic video info."""

    def __init__(self, query, count):
        """Store the search query and result count; read the API key.

        The key comes from the YOUTUBE_API environment variable and may be
        None here, in which case search() will fail when building the
        service.
        """
        self.query = query
        self.api_key = environ.get("YOUTUBE_API")
        # Bug fix: the original `self.response: dict` was a bare annotation
        # and never created the attribute, so print_response() raised
        # AttributeError when called before search(). Initialise it for real.
        self.response: dict = {}
        self.count = count

    def search(self) -> None:
        """Run the search and cache the raw API response on self.response."""
        service = build('youtube', 'v3', developerKey=self.api_key)
        request = service.search().list(q=self.query, part="snippet",
                                        maxResults=self.count, type="video")
        self.response = request.execute()
        service.close()

    def print_response(self) -> None:
        """Pretty-print title/URL/thumbnail/channel for each search result."""
        print("\nTotal Requested Result: ", self.count)
        print("Total Response Results: ", len(self.response["items"]))
        print("\n-------------------------------------------------\n")
        for stats in self.response["items"]:
            video_id = stats["id"]["videoId"]
            video_url = f"https://youtube.com/watch?v={video_id}"
            title = stats["snippet"]["title"]
            thumbnail = stats["snippet"]["thumbnails"]["high"]["url"]
            channel = stats["snippet"]["channelTitle"]
            print(title)
            print(video_url)
            print(thumbnail)
            print(channel)
            print("\n-------------------------------------------------\n")
if __name__ == "__main__":
    # Usage fix: the original advertised a "-o [OUTPUT]" option that is
    # never defined below.
    argp = argparse.ArgumentParser(
        usage="youfor.py -q QUERY -c [COUNT]")
    argp.add_argument("-q", "--query", required=True)
    # Bug fix: argparse yields strings by default, so `-c 5` handed the
    # API a str while the default stayed an int. type=int normalises it.
    argp.add_argument("-c", "--count", type=int, default=10)
    parser = argp.parse_args()
    query = parser.query
    count = parser.count
    # Clear the terminal in a platform-appropriate way before printing.
    if system_os() == 'Linux':
        system('clear')
    elif system_os() == 'Windows':
        system('cls')
    youtube = Youtube(query=query, count=count)
    youtube.search()
    youtube.print_response()
|
import httpClient from '@/api/httpClient';
// Thin REST wrappers around the translations endpoints.

// POST /translations — create a new translation request.
const createTranslation = (body) => httpClient.post('/translations', body);

// GET /translations/lawyer — list translations for the current lawyer.
const indexTranslation = () => httpClient.get('/translations/lawyer');

// GET /translations/:id/progress — fetch progress for one translation.
const showTranslationProcess = (translationId) =>
  httpClient.get(`/translations/${translationId}/progress`);

export default { createTranslation, indexTranslation, showTranslationProcess };
|
//Creation a rotation effect of arrow up to down:
//Creation a rotation effect of arrow up to down:
// Cache the widgets once; index [0] assumes a single instance per page.
var simTooltip = document.getElementsByClassName("simple-tooltip"),
arrow = document.getElementsByClassName("arrow"),
dropList = document.getElementsByClassName("visible-content"),
contentList = document.getElementsByClassName("contentOfList");
// Hovering the tooltip rotates the arrow down and opens the dropdown.
simTooltip[0].addEventListener("mouseover",function(){
arrow[0].classList.toggle('down',true);
showDropdownList(true);
showContentOfList(true);
});
// Leaving it reverses all three effects.
simTooltip[0].addEventListener("mouseout",function(){
arrow[0].classList.toggle('down',false);
showDropdownList(false);
showContentOfList(false);
});
// Expand (show === true) or collapse the dropdown container by forcing
// the 'dropHeight' class on or off.
function showDropdownList(show){
    dropList[0].classList.toggle('dropHeight', show === true);
}
// Reveal (show === true) or hide the dropdown's inner content by forcing
// the 'showContent' class on or off.
function showContentOfList(show){
    contentList[0].classList.toggle('showContent', show === true);
}
// Keep the dropdown open while the pointer is over the list itself
// (mouseout from the tooltip fires before mouseover here, so these
// handlers re-apply the open state).
dropList[0].addEventListener("mouseover",function(){
showDropdownList(true);
showContentOfList(true);
arrow[0].classList.toggle('down',true);
});
dropList[0].addEventListener("mouseout",function(){
showDropdownList(false);
showContentOfList(false);
arrow[0].classList.toggle('down',false);
});
// Chat-help widget: swap the material icon and slide the help message
// in/out on hover.
var chatIcon = document.getElementsByClassName("material-icons chat-icon");
var helpMessage = document.getElementsByClassName("help-message");
document.getElementsByClassName("chat-help")[0].onmouseover = function(){
  chatIcon[0].innerHTML = "chat";
  chatIcon[0].style.transition = ".5s ease-out";
  helpMessage[0].classList.toggle('hideWidth',true);
}
document.getElementsByClassName("chat-help")[0].onmouseout = function(){
  chatIcon[0].innerHTML = "help";
  // Bug fix: ".5 ease-out" lacked the "s" unit, making the transition
  // value invalid (silently ignored); mirror the mouseover value.
  chatIcon[0].style.transition = ".5s ease-out";
  helpMessage[0].classList.toggle('hideWidth',false);
}
// Swap the sticky header for the main menu once the page is scrolled
// past the floating block.
window.onscroll = function() {
  var header = document.getElementsByClassName("sticky-header");
  var uHeader = document.getElementsByClassName("mainmenu");
  var pH = document.getElementsByClassName("float-block");
  var H = document.body.scrollHeight - pH[0].scrollHeight;
  if(document.body.scrollTop > H) {
    header[0].classList.toggle('hide-stick',true);
    uHeader[0].classList.toggle('stick-header',true);
  } else {
    // Bug fix: "toogle" was a typo — classList has no such method, so
    // scrolling back up threw a TypeError and never restored the header.
    header[0].classList.toggle('hide-stick',false);
    uHeader[0].classList.toggle('stick-header',false);
  }
}
//Creation a mini-script for calculation of circle circumference:
var lcircle = document.getElementById("lcircle");
var lcircleOut = document.getElementById("lcircle-val");
var lcircleOutput = document.getElementById("lcircle-output");
// On every slider/input change, echo the radius and show L = 2*pi*r.
lcircle.oninput = function() {
lcircleOut.innerHTML = lcircle.value + '(m)';
lcircleOutput.innerHTML = 'L = ' + 2 * Math.PI * lcircle.value + '(m)';
}
//For sidebar navigation panel:
var sidePanel = document.getElementsByClassName("side-buttons");
// NOTE(review): debug logging left in — presumably safe to remove.
console.log(sidePanel[0].innerHTML);
|
# Rotate the wallet passphrase over a local JSON-RPC interface.
# NOTE(review): uses `raw_input`, so this script targets Python 2; the
# third-party `jsonrpc` ServiceProxy talks to a daemon on port 2668.
from jsonrpc import ServiceProxy
access = ServiceProxy("http://127.0.0.1:2668")
# Both prompts echo input in the clear (raw_input does not mask it).
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
|
import React from 'react'
import { ExampleComponent } from 'npm-tester'
import 'npm-tester/dist/index.css'
const App = () => {
return <ExampleComponent text="Create React Library Example 😄" />
}
export default App
|
# A flat list of compliment strings, presumably sampled at random by the UI.
# NOTE(review): several literals appear to contain hard line breaks from a
# copy/paste or extraction step — confirm the strings are intact upstream.
compliments = ["You have very smooth hair.","You deserve a promotion.","Good effort!","What a fine sweater!","I appreciate all of your opinions.","I like your style.","Your T-shirt smells fresh.","I love what you've done with the place.","You are like a spring flower; beautiful and vivacious.","I am utterly disarmed by your wit.","I really enjoy the way you pronounce the word 'ruby'.","You complete me.","Well done!","I like your Facebook status.","That looks nice on you.","I like those shoes more than mine.","Nice motor control!","You have a good taste in websites.","Your mouse told me that you have very soft hands.","You are full of youth.","I like your jacket.","I like the way you move.","You have a good web-surfing stance.","You should be a poster child for poster children.","Nice manners!","I appreciate you more than Santa appreciates chimney grease.","I wish I was your mirror.","I find you to be a fountain of inspiration.","You have perfect bone structure.","I disagree with anyone who disagrees with you.","Way to go!","Have you been working out?","With your creative wit, I'm sure you could come up with better compliments than me.","I like your socks.","You are so charming.","Your cooking reminds me of my mother's.","You're tremendous!","You deserve a compliment!","Hello, good looking.","Your smile is breath taking.","How do you get your hair to look that great?","You are quite strapping.","I am grateful to be blessed by your presence.","Say, aren't you that famous model from TV?","Take a break; you've earned it.","Your life is so interesting!","The sound of your voice sends tingles of joy down my back.","I enjoy spending time with you.","I would share my dessert with you.","You can have the last bite.","May I have this dance?","I would love to visit you, but I live on the Internet.","I love the way you click.","You're invited to my birthday party.","All of your ideas are brilliant!","If I freeze, it's not a computer virus. 
I was just stunned by your beauty.","You're spontaneous, and I love it!","You should try out for everything.","You make my data circuits skip a beat.","You are the gravy to my mashed potatoes.","You get an A+!","I'm jealous of the other websites you visit, because I enjoy seeing you so much!","I would enjoy a roadtrip with you.","If I had to choose between you or Mr. Rogers, it would be you.","I like you more than the smell of Grandma's home-made apple pies.","You would look good in glasses OR contacts.","Let's do this again sometime.","You could go longer without a shower than most people.","I feel the need to impress you.","I would trust you to pick out a pet fish for me.","I'm glad we met.","Do that again!","Will you sign my yearbook?","You're so smart!","We should start a band.","You're cooler than ice-skating Fonzi.","I made this website for you.","I heard you make really good French Toast.","You're cooler than Pirates and Ninjas combined.","Oh, I can keep going.","I like your pants.","You're pretty groovy, dude.","When I grow up, I want to be just like you.","I told all my friends about how cool you are.","You can play any prank, and get away with it.","You have ten of the best fingers I have ever seen!","I can tell that we are gonna be friends.","I just want to gobble you up!","You're sweeter than than a bucket of bon-bons!","Treat yourself to another compliment!","You're pretty high on my list of people with whom I would want to be stranded on an island.","You're #1 in my book!","Well played.","You are well groomed.","You could probably lead a rebellion.","Is it hot in here or is it just you?","<3","You are more fun than a Japanese steakhouse.","Your voice is more soothing than Morgan Freeman's.","I like your sleeves. 
They're real big.","You could be drinking whole milk if you wanted to.","You're so beautiful, you make me walk into things when I look at you.","I support all of your decisions.","You are as fun as a hot tub full of chocolate pudding.","I usually don't say this on a first date, but will you marry me?","I don't speak much English, but with you all I really need to say is beautiful.","Being awesome is hard, but you'll manage.","Your skin is radiant.","You will still be beautiful when you get older.","You could survive a zombie apocalypse.","You make me :)","I wish I could move your furniture.","I think about you while I'm on the toilet.","You're so rad.","You're more fun than a barrel of monkeys.","You're nicer than a day on the beach.","Your glass is the fullest.","I find you very relevant.","You look so perfect.","The only difference between exceptional and amazing is you.","Last night I had the hiccups, and the only thing that comforted me to sleep was repeating your name over and over.","I like your pearly whites!","Your eyebrows really make your pretty eyes stand out.","Shall I compare thee to a summer's day? 
Thou art more lovely and more temperate.","I love you more than bacon!","You intrigue me.","You make me think of beautiful things, like strawberries.","I would share my fruit Gushers with you.","You're more aesthetically pleasant to look at than that one green color on this website.","Even though this goes against everything I know, I think I'm in love with you.","You're more fun than bubble wrap.","Your smile could illuminate the depths of the ocean.","You make babies smile.","You make the gloomy days a little less gloomy.","You are warmer than a Snuggie.","You make me feel like I am on top of the world.","Playing video games with you would be fun.","Let's never stop hanging out.","You're more cuddly than the Downy Bear.","I would do your taxes any day.","You are a bucket of awesome.","You are the star of my daydreams.","If you really wanted to, you could probably get a bird to land on your shoulder and hang out with you.","My mom always asks me why I can't be more like you.","You look great in this or any other light.","You listen to the coolest music.","You and Chuck Norris are on equal levels.","Your body fat percentage is perfectly suited for your height.","I am having trouble coming up with a compliment worthy enough for you.","If we were playing kickball, I'd pick you first.","You're cooler than ice on the rocks.","You're the bee's knees.","I wish I could choose your handwriting as a font.","You definitely know the difference between your and you're.","You have good taste.","I named all my appliances after you.","Your mind is a maze of amazing!","Don't worry about procrastinating on your studies, I know you'll do great!","I like your style!","Hi, I'd like to know why you're so beautiful.","If I could count the seconds I think about you, I will die in the process!","If you were in a chemistry class with me, it would be 10x less boring.","If you broke your arm, I would carry your books for you.","I love the way your eyes crinkle at the corners when you 
smile.","You make me want to be the person I am capable of being.","You're a skilled driver.","You are the rare catalyst to my volatile compound.","You're a tall glass of water!","I'd like to kiss you. Often.","You are the wind beneath my wings.","Looking at you makes my foot cramps go away instantaneously.","I like your face.","You are a champ!","You are infatuating.","Even my cat likes you.","There isn't a thing about you that I don't like.","You're so cool, that on a scale of from 1-10, you're elevendyseven.","OH, you OWN that ponytail.","Your shoes are untied. But for you, it's cool.","You have the best laugh ever.","We would enjoy a cookout with you!","Your name is fun to say.","I love you more than a drunk college student loves tacos.","My camera isn't worthy to take your picture.","You are the sugar on my rice krispies.","Nice belt!","I could hang out with you for a solid year and never get tired of you.","You're real happening in a far out way.","I bet you could take a punch from Mike Tyson.","Your feet are perfect size!","You have very nice teeth.","Can you teach me how to be as awesome as you?","Our awkward silences aren't even awkward.","Don't worry. You'll do great.","I enjoy you more than a good sneeze. A GOOD one.","You could invent words and people would use them.","You have powerful sweaters.","If you were around, I would enjoy doing my taxes.","You look like you like to rock.","You are better than unicorns and sparkles combined!","You are the watermelon in my fruit salad. Yum!","I dig you.","You look better whether the lights are on or off.","I am enchanted to meet you.","I bet even your farts smell good.","I would trust my children with you.","You make me forget what I was going to...","Your smile makes me smile.","I'd wake up for an 8 a.m. 
class just so I could sit next to you.","You have the moves like Jagger.","You're so hot that you denature my proteins.","All I want for Christmas is you!","You are the world's greatest hugger.","You have a perfectly symmetrical face.","If you were in a movie you wouldn't get killed off.","Your red ruby lips and wiggly hips make me do flips!","I definitely wouldn't kick you out of bed.","They should name an ice cream flavor after you.","You're the salsa to my tortilla chips. You spice up my life!","You smell nice.","You don't need make-up, make-up needs you.","Me without you is like a nerd without braces, a shoe with out laces, asentencewithoutspaces.","Just knowing someone as cool as you will read this makes me smile.","I would volunteer to take your place in the Hunger Games.","If I had a nickel for everytime you did something stupid, I'd be broke!","I'd let you steal the white part of my Oreo.","I'd trust you to perform open heart surgery on me... blindfolded!","Nice butt! - According to your toilet seat","Perfume strives to smell like you.","I've had the time of my life, and I owe it all to you!","The Force is strong with you.","I like the way your nostrils are placed on your nose.","I would hold the elevator doors open for you if they were closing.","Your every thought and motion contributes to the beauty of the universe.","You make me want to frolic in a field."]
|
import React, { useState, useEffect } from 'react';
import ReactDOM from 'react-dom';
import { Column } from '@ant-design/charts';
// Column-chart demo that fills each bar with a custom canvas pattern
// (circle + cross on a colored tile, repeated and rotated).
const DemoColumn = () => {
const data = [
{
type: '分类一',
value: 27,
},
{
type: '分类二',
value: 25,
},
{
type: '分类三',
value: 18,
},
{
type: '分类四',
value: 15,
},
{
type: '其他',
value: 5,
},
];
// Draw a filled circle of radius r centred on (x, y).
function drawCircle(context, x, y, r, fill) {
context.beginPath();
context.fillStyle = fill;
context.arc(x, y, r, 0, 2 * Math.PI, false);
context.fill();
context.closePath();
}
// Stroke a rectangle outline with the given color and line width.
function strokeRectangle(context, x, y, w, h, fill, lineWidth) {
context.strokeStyle = fill;
context.lineWidth = lineWidth;
context.strokeRect(x, y, w, h);
}
// Draw a filled rectangle.
function drawRectangle(context, x, y, w, h, fill) {
context.fillStyle = fill;
context.fillRect(x, y, w, h);
}
// Build a repeatable CanvasPattern: a colored tile with a white circle
// and a white cross, optionally rotated by `rotation` degrees.
function createPattern(width = 200, height = width, color, rotation = 0) {
const pixelRatio = window.devicePixelRatio || 2; // 1. create the canvas
const canvas = document.createElement('canvas'); // backing-store size
canvas.width = width * pixelRatio;
canvas.height = height * pixelRatio; // display size
canvas.style.width = `${width}px`;
canvas.style.height = `${height}px`;
const context = canvas.getContext('2d');
context.scale(pixelRatio, pixelRatio); // 2. draw the tile
context.globalAlpha = 1;
context.lineWidth = 0;
const padding = 4;
drawRectangle(context, 0, 0, width, height, color);
drawCircle(context, width / 2, height / 2, Math.min(width / 4, height / 4), '#fff');
drawRectangle(context, width / 2 - padding / 2, 0, padding, height, '#fff');
drawRectangle(context, 0, height / 2 - padding / 2, width, padding, '#fff');
strokeRectangle(context, 0, 0, width, height, color, 0.5);
const pattern = context.createPattern(canvas, 'repeat');
if (pattern) {
// Apply a rotation matrix, compensating for the pixel-ratio scale.
const radian = (rotation * Math.PI) / 180;
pattern.setTransform({
a: Math.cos(radian) * (1 / pixelRatio),
b: Math.sin(radian) * (1 / pixelRatio),
c: -Math.sin(radian) * (1 / pixelRatio),
d: Math.cos(radian) * (1 / pixelRatio),
e: 0,
f: 0,
});
}
return pattern;
}
const config = {
data,
yField: 'value',
xField: 'type',
pattern: createPattern(20, 20, '#9c88ff', 45),
columnStyle: {
stroke: '#9c88ff',
strokeOpacity: 0.85,
},
interactions: [
{
type: 'element-active',
},
],
};
return <Column {...config} />;
};
// Mount the demo chart into the page's #container element.
ReactDOM.render(<DemoColumn />, document.getElementById('container'));
|
/*!
* Bootstrap v3.3.7 (http://getbootstrap.com)
* Copyright 2011-2017 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
*/
/*!
* Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=379d67bc9d795cdf2f00ef632b124f87)
* Config saved to config.json and https://gist.github.com/379d67bc9d795cdf2f00ef632b124f87
*/
if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(t){"use strict";var e=t.fn.jquery.split(" ")[0].split(".");if(e[0]<2&&e[1]<9||1==e[0]&&9==e[1]&&e[2]<1||e[0]>3)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 4")}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var i=t(this),n=i.data("bs.alert");n||i.data("bs.alert",n=new o(this)),"string"==typeof e&&n[e].call(i)})}var i='[data-dismiss="alert"]',o=function(e){t(e).on("click",i,this.close)};o.VERSION="3.3.7",o.TRANSITION_DURATION=150,o.prototype.close=function(e){function i(){a.detach().trigger("closed.bs.alert").remove()}var n=t(this),s=n.attr("data-target");s||(s=n.attr("href"),s=s&&s.replace(/.*(?=#[^\s]*$)/,""));var a=t("#"===s?[]:s);e&&e.preventDefault(),a.length||(a=n.closest(".alert")),a.trigger(e=t.Event("close.bs.alert")),e.isDefaultPrevented()||(a.removeClass("in"),t.support.transition&&a.hasClass("fade")?a.one("bsTransitionEnd",i).emulateTransitionEnd(o.TRANSITION_DURATION):i())};var n=t.fn.alert;t.fn.alert=e,t.fn.alert.Constructor=o,t.fn.alert.noConflict=function(){return t.fn.alert=n,this},t(document).on("click.bs.alert.data-api",i,o.prototype.close)}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.button"),s="object"==typeof e&&e;n||o.data("bs.button",n=new i(this,s)),"toggle"==e?n.toggle():e&&n.setState(e)})}var i=function(e,o){this.$element=t(e),this.options=t.extend({},i.DEFAULTS,o),this.isLoading=!1};i.VERSION="3.3.7",i.DEFAULTS={loadingText:"loading..."},i.prototype.setState=function(e){var 
i="disabled",o=this.$element,n=o.is("input")?"val":"html",s=o.data();e+="Text",null==s.resetText&&o.data("resetText",o[n]()),setTimeout(t.proxy(function(){o[n](null==s[e]?this.options[e]:s[e]),"loadingText"==e?(this.isLoading=!0,o.addClass(i).attr(i,i).prop(i,!0)):this.isLoading&&(this.isLoading=!1,o.removeClass(i).removeAttr(i).prop(i,!1))},this),0)},i.prototype.toggle=function(){var t=!0,e=this.$element.closest('[data-toggle="buttons"]');if(e.length){var i=this.$element.find("input");"radio"==i.prop("type")?(i.prop("checked")&&(t=!1),e.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==i.prop("type")&&(i.prop("checked")!==this.$element.hasClass("active")&&(t=!1),this.$element.toggleClass("active")),i.prop("checked",this.$element.hasClass("active")),t&&i.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var o=t.fn.button;t.fn.button=e,t.fn.button.Constructor=i,t.fn.button.noConflict=function(){return t.fn.button=o,this},t(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(i){var o=t(i.target).closest(".btn");e.call(o,"toggle"),t(i.target).is('input[type="radio"], input[type="checkbox"]')||(i.preventDefault(),o.is("input,button")?o.trigger("focus"):o.find("input:visible,button:visible").first().trigger("focus"))}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(e){t(e.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(e.type))})}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.carousel"),s=t.extend({},i.DEFAULTS,o.data(),"object"==typeof e&&e),a="string"==typeof e?e:s.slide;n||o.data("bs.carousel",n=new i(this,s)),"number"==typeof e?n.to(e):a?n[a]():s.interval&&n.pause().cycle()})}var 
i=function(e,i){this.$element=t(e),this.$indicators=this.$element.find(".carousel-indicators"),this.options=i,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",t.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",t.proxy(this.pause,this)).on("mouseleave.bs.carousel",t.proxy(this.cycle,this))};i.VERSION="3.3.7",i.TRANSITION_DURATION=600,i.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},i.prototype.keydown=function(t){if(!/input|textarea/i.test(t.target.tagName)){switch(t.which){case 37:this.prev();break;case 39:this.next();break;default:return}t.preventDefault()}},i.prototype.cycle=function(e){return e||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(t.proxy(this.next,this),this.options.interval)),this},i.prototype.getItemIndex=function(t){return this.$items=t.parent().children(".item"),this.$items.index(t||this.$active)},i.prototype.getItemForDirection=function(t,e){var i=this.getItemIndex(e),o="prev"==t&&0===i||"next"==t&&i==this.$items.length-1;if(o&&!this.options.wrap)return e;var n="prev"==t?-1:1,s=(i+n)%this.$items.length;return this.$items.eq(s)},i.prototype.to=function(t){var e=this,i=this.getItemIndex(this.$active=this.$element.find(".item.active"));return t>this.$items.length-1||0>t?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){e.to(t)}):i==t?this.pause().cycle():this.slide(t>i?"next":"prev",this.$items.eq(t))},i.prototype.pause=function(e){return e||(this.paused=!0),this.$element.find(".next, .prev").length&&t.support.transition&&(this.$element.trigger(t.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},i.prototype.next=function(){return this.sliding?void 0:this.slide("next")},i.prototype.prev=function(){return 
this.sliding?void 0:this.slide("prev")},i.prototype.slide=function(e,o){var n=this.$element.find(".item.active"),s=o||this.getItemForDirection(e,n),a=this.interval,r="next"==e?"left":"right",l=this;if(s.hasClass("active"))return this.sliding=!1;var h=s[0],d=t.Event("slide.bs.carousel",{relatedTarget:h,direction:r});if(this.$element.trigger(d),!d.isDefaultPrevented()){if(this.sliding=!0,a&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var p=t(this.$indicators.children()[this.getItemIndex(s)]);p&&p.addClass("active")}var c=t.Event("slid.bs.carousel",{relatedTarget:h,direction:r});return t.support.transition&&this.$element.hasClass("slide")?(s.addClass(e),s[0].offsetWidth,n.addClass(r),s.addClass(r),n.one("bsTransitionEnd",function(){s.removeClass([e,r].join(" ")).addClass("active"),n.removeClass(["active",r].join(" ")),l.sliding=!1,setTimeout(function(){l.$element.trigger(c)},0)}).emulateTransitionEnd(i.TRANSITION_DURATION)):(n.removeClass("active"),s.addClass("active"),this.sliding=!1,this.$element.trigger(c)),a&&this.cycle(),this}};var o=t.fn.carousel;t.fn.carousel=e,t.fn.carousel.Constructor=i,t.fn.carousel.noConflict=function(){return t.fn.carousel=o,this};var n=function(i){var o,n=t(this),s=t(n.attr("data-target")||(o=n.attr("href"))&&o.replace(/.*(?=#[^\s]+$)/,""));if(s.hasClass("carousel")){var a=t.extend({},s.data(),n.data()),r=n.attr("data-slide-to");r&&(a.interval=!1),e.call(s,a),r&&s.data("bs.carousel").to(r),i.preventDefault()}};t(document).on("click.bs.carousel.data-api","[data-slide]",n).on("click.bs.carousel.data-api","[data-slide-to]",n),t(window).on("load",function(){t('[data-ride="carousel"]').each(function(){var i=t(this);e.call(i,i.data())})})}(jQuery),+function(t){"use strict";function e(e){var i=e.attr("data-target");i||(i=e.attr("href"),i=i&&/#[A-Za-z]/.test(i)&&i.replace(/.*(?=#[^\s]*$)/,""));var o=i&&t(i);return o&&o.length?o:e.parent()}function 
i(i){i&&3===i.which||(t(n).remove(),t(s).each(function(){var o=t(this),n=e(o),s={relatedTarget:this};n.hasClass("open")&&(i&&"click"==i.type&&/input|textarea/i.test(i.target.tagName)&&t.contains(n[0],i.target)||(n.trigger(i=t.Event("hide.bs.dropdown",s)),i.isDefaultPrevented()||(o.attr("aria-expanded","false"),n.removeClass("open").trigger(t.Event("hidden.bs.dropdown",s)))))}))}function o(e){return this.each(function(){var i=t(this),o=i.data("bs.dropdown");o||i.data("bs.dropdown",o=new a(this)),"string"==typeof e&&o[e].call(i)})}var n=".dropdown-backdrop",s='[data-toggle="dropdown"]',a=function(e){t(e).on("click.bs.dropdown",this.toggle)};a.VERSION="3.3.7",a.prototype.toggle=function(o){var n=t(this);if(!n.is(".disabled, :disabled")){var s=e(n),a=s.hasClass("open");if(i(),!a){"ontouchstart"in document.documentElement&&!s.closest(".navbar-nav").length&&t(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(t(this)).on("click",i);var r={relatedTarget:this};if(s.trigger(o=t.Event("show.bs.dropdown",r)),o.isDefaultPrevented())return;n.trigger("focus").attr("aria-expanded","true"),s.toggleClass("open").trigger(t.Event("shown.bs.dropdown",r))}return!1}},a.prototype.keydown=function(i){if(/(38|40|27|32)/.test(i.which)&&!/input|textarea/i.test(i.target.tagName)){var o=t(this);if(i.preventDefault(),i.stopPropagation(),!o.is(".disabled, :disabled")){var n=e(o),a=n.hasClass("open");if(!a&&27!=i.which||a&&27==i.which)return 27==i.which&&n.find(s).trigger("focus"),o.trigger("click");var r=" li:not(.disabled):visible a",l=n.find(".dropdown-menu"+r);if(l.length){var h=l.index(i.target);38==i.which&&h>0&&h--,40==i.which&&h<l.length-1&&h++,~h||(h=0),l.eq(h).trigger("focus")}}}};var r=t.fn.dropdown;t.fn.dropdown=o,t.fn.dropdown.Constructor=a,t.fn.dropdown.noConflict=function(){return t.fn.dropdown=r,this},t(document).on("click.bs.dropdown.data-api",i).on("click.bs.dropdown.data-api",".dropdown 
form",function(t){t.stopPropagation()}).on("click.bs.dropdown.data-api",s,a.prototype.toggle).on("keydown.bs.dropdown.data-api",s,a.prototype.keydown).on("keydown.bs.dropdown.data-api",".dropdown-menu",a.prototype.keydown)}(jQuery),+function(t){"use strict";function e(e,o){return this.each(function(){var n=t(this),s=n.data("bs.modal"),a=t.extend({},i.DEFAULTS,n.data(),"object"==typeof e&&e);s||n.data("bs.modal",s=new i(this,a)),"string"==typeof e?s[e](o):a.show&&s.show(o)})}var i=function(e,i){this.options=i,this.$body=t(document.body),this.$element=t(e),this.$dialog=this.$element.find(".modal-dialog"),this.$backdrop=null,this.isShown=null,this.originalBodyPad=null,this.scrollbarWidth=0,this.ignoreBackdropClick=!1,this.options.remote&&this.$element.find(".modal-content").load(this.options.remote,t.proxy(function(){this.$element.trigger("loaded.bs.modal")},this))};i.VERSION="3.3.7",i.TRANSITION_DURATION=300,i.BACKDROP_TRANSITION_DURATION=150,i.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},i.prototype.toggle=function(t){return this.isShown?this.hide():this.show(t)},i.prototype.show=function(e){var o=this,n=t.Event("show.bs.modal",{relatedTarget:e});this.$element.trigger(n),this.isShown||n.isDefaultPrevented()||(this.isShown=!0,this.checkScrollbar(),this.setScrollbar(),this.$body.addClass("modal-open"),this.escape(),this.resize(),this.$element.on("click.dismiss.bs.modal",'[data-dismiss="modal"]',t.proxy(this.hide,this)),this.$dialog.on("mousedown.dismiss.bs.modal",function(){o.$element.one("mouseup.dismiss.bs.modal",function(e){t(e.target).is(o.$element)&&(o.ignoreBackdropClick=!0)})}),this.backdrop(function(){var n=t.support.transition&&o.$element.hasClass("fade");o.$element.parent().length||o.$element.appendTo(o.$body),o.$element.show().scrollTop(0),o.adjustDialog(),n&&o.$element[0].offsetWidth,o.$element.addClass("in"),o.enforceFocus();var 
s=t.Event("shown.bs.modal",{relatedTarget:e});n?o.$dialog.one("bsTransitionEnd",function(){o.$element.trigger("focus").trigger(s)}).emulateTransitionEnd(i.TRANSITION_DURATION):o.$element.trigger("focus").trigger(s)}))},i.prototype.hide=function(e){e&&e.preventDefault(),e=t.Event("hide.bs.modal"),this.$element.trigger(e),this.isShown&&!e.isDefaultPrevented()&&(this.isShown=!1,this.escape(),this.resize(),t(document).off("focusin.bs.modal"),this.$element.removeClass("in").off("click.dismiss.bs.modal").off("mouseup.dismiss.bs.modal"),this.$dialog.off("mousedown.dismiss.bs.modal"),t.support.transition&&this.$element.hasClass("fade")?this.$element.one("bsTransitionEnd",t.proxy(this.hideModal,this)).emulateTransitionEnd(i.TRANSITION_DURATION):this.hideModal())},i.prototype.enforceFocus=function(){t(document).off("focusin.bs.modal").on("focusin.bs.modal",t.proxy(function(t){document===t.target||this.$element[0]===t.target||this.$element.has(t.target).length||this.$element.trigger("focus")},this))},i.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keydown.dismiss.bs.modal",t.proxy(function(t){27==t.which&&this.hide()},this)):this.isShown||this.$element.off("keydown.dismiss.bs.modal")},i.prototype.resize=function(){this.isShown?t(window).on("resize.bs.modal",t.proxy(this.handleUpdate,this)):t(window).off("resize.bs.modal")},i.prototype.hideModal=function(){var t=this;this.$element.hide(),this.backdrop(function(){t.$body.removeClass("modal-open"),t.resetAdjustments(),t.resetScrollbar(),t.$element.trigger("hidden.bs.modal")})},i.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},i.prototype.backdrop=function(e){var o=this,n=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var s=t.support.transition&&n;if(this.$backdrop=t(document.createElement("div")).addClass("modal-backdrop 
"+n).appendTo(this.$body),this.$element.on("click.dismiss.bs.modal",t.proxy(function(t){return this.ignoreBackdropClick?void(this.ignoreBackdropClick=!1):void(t.target===t.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus():this.hide()))},this)),s&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!e)return;s?this.$backdrop.one("bsTransitionEnd",e).emulateTransitionEnd(i.BACKDROP_TRANSITION_DURATION):e()}else if(!this.isShown&&this.$backdrop){this.$backdrop.removeClass("in");var a=function(){o.removeBackdrop(),e&&e()};t.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one("bsTransitionEnd",a).emulateTransitionEnd(i.BACKDROP_TRANSITION_DURATION):a()}else e&&e()},i.prototype.handleUpdate=function(){this.adjustDialog()},i.prototype.adjustDialog=function(){var t=this.$element[0].scrollHeight>document.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&t?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!t?this.scrollbarWidth:""})},i.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},i.prototype.checkScrollbar=function(){var t=window.innerWidth;if(!t){var e=document.documentElement.getBoundingClientRect();t=e.right-Math.abs(e.left)}this.bodyIsOverflowing=document.body.clientWidth<t,this.scrollbarWidth=this.measureScrollbar()},i.prototype.setScrollbar=function(){var t=parseInt(this.$body.css("padding-right")||0,10);this.originalBodyPad=document.body.style.paddingRight||"",this.bodyIsOverflowing&&this.$body.css("padding-right",t+this.scrollbarWidth)},i.prototype.resetScrollbar=function(){this.$body.css("padding-right",this.originalBodyPad)},i.prototype.measureScrollbar=function(){var t=document.createElement("div");t.className="modal-scrollbar-measure",this.$body.append(t);var e=t.offsetWidth-t.clientWidth;return this.$body[0].removeChild(t),e};var o=t.fn.modal;t.fn.modal=e,t.fn.modal.Constructor=i,t.fn.modal.noConflict=function(){return 
t.fn.modal=o,this},t(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(i){var o=t(this),n=o.attr("href"),s=t(o.attr("data-target")||n&&n.replace(/.*(?=#[^\s]+$)/,"")),a=s.data("bs.modal")?"toggle":t.extend({remote:!/#/.test(n)&&n},s.data(),o.data());o.is("a")&&i.preventDefault(),s.one("show.bs.modal",function(t){t.isDefaultPrevented()||s.one("hidden.bs.modal",function(){o.is(":visible")&&o.trigger("focus")})}),e.call(s,a,this)})}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.tooltip"),s="object"==typeof e&&e;!n&&/destroy|hide/.test(e)||(n||o.data("bs.tooltip",n=new i(this,s)),"string"==typeof e&&n[e]())})}var i=function(t,e){this.type=null,this.options=null,this.enabled=null,this.timeout=null,this.hoverState=null,this.$element=null,this.inState=null,this.init("tooltip",t,e)};i.VERSION="3.3.7",i.TRANSITION_DURATION=150,i.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},i.prototype.init=function(e,i,o){if(this.enabled=!0,this.type=e,this.$element=t(i),this.options=this.getOptions(o),this.$viewport=this.options.viewport&&t(t.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var n=this.options.trigger.split(" "),s=n.length;s--;){var a=n[s];if("click"==a)this.$element.on("click."+this.type,this.options.selector,t.proxy(this.toggle,this));else if("manual"!=a){var 
r="hover"==a?"mouseenter":"focusin",l="hover"==a?"mouseleave":"focusout";this.$element.on(r+"."+this.type,this.options.selector,t.proxy(this.enter,this)),this.$element.on(l+"."+this.type,this.options.selector,t.proxy(this.leave,this))}}this.options.selector?this._options=t.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},i.prototype.getDefaults=function(){return i.DEFAULTS},i.prototype.getOptions=function(e){return e=t.extend({},this.getDefaults(),this.$element.data(),e),e.delay&&"number"==typeof e.delay&&(e.delay={show:e.delay,hide:e.delay}),e},i.prototype.getDelegateOptions=function(){var e={},i=this.getDefaults();return this._options&&t.each(this._options,function(t,o){i[t]!=o&&(e[t]=o)}),e},i.prototype.enter=function(e){var i=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i)),e instanceof t.Event&&(i.inState["focusin"==e.type?"focus":"hover"]=!0),i.tip().hasClass("in")||"in"==i.hoverState?void(i.hoverState="in"):(clearTimeout(i.timeout),i.hoverState="in",i.options.delay&&i.options.delay.show?void(i.timeout=setTimeout(function(){"in"==i.hoverState&&i.show()},i.options.delay.show)):i.show())},i.prototype.isInStateTrue=function(){for(var t in this.inState)if(this.inState[t])return!0;return!1},i.prototype.leave=function(e){var i=e instanceof this.constructor?e:t(e.currentTarget).data("bs."+this.type);return i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i)),e instanceof t.Event&&(i.inState["focusout"==e.type?"focus":"hover"]=!1),i.isInStateTrue()?void 0:(clearTimeout(i.timeout),i.hoverState="out",i.options.delay&&i.options.delay.hide?void(i.timeout=setTimeout(function(){"out"==i.hoverState&&i.hide()},i.options.delay.hide)):i.hide())},i.prototype.show=function(){var 
e=t.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(e);var o=t.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(e.isDefaultPrevented()||!o)return;var n=this,s=this.tip(),a=this.getUID(this.type);this.setContent(),s.attr("id",a),this.$element.attr("aria-describedby",a),this.options.animation&&s.addClass("fade");var r="function"==typeof this.options.placement?this.options.placement.call(this,s[0],this.$element[0]):this.options.placement,l=/\s?auto?\s?/i,h=l.test(r);h&&(r=r.replace(l,"")||"top"),s.detach().css({top:0,left:0,display:"block"}).addClass(r).data("bs."+this.type,this),this.options.container?s.appendTo(this.options.container):s.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var d=this.getPosition(),p=s[0].offsetWidth,c=s[0].offsetHeight;if(h){var f=r,u=this.getPosition(this.$viewport);r="bottom"==r&&d.bottom+c>u.bottom?"top":"top"==r&&d.top-c<u.top?"bottom":"right"==r&&d.right+p>u.width?"left":"left"==r&&d.left-p<u.left?"right":r,s.removeClass(f).addClass(r)}var g=this.getCalculatedOffset(r,d,p,c);this.applyPlacement(g,r);var v=function(){var t=n.hoverState;n.$element.trigger("shown.bs."+n.type),n.hoverState=null,"out"==t&&n.leave(n)};t.support.transition&&this.$tip.hasClass("fade")?s.one("bsTransitionEnd",v).emulateTransitionEnd(i.TRANSITION_DURATION):v()}},i.prototype.applyPlacement=function(e,i){var o=this.tip(),n=o[0].offsetWidth,s=o[0].offsetHeight,a=parseInt(o.css("margin-top"),10),r=parseInt(o.css("margin-left"),10);isNaN(a)&&(a=0),isNaN(r)&&(r=0),e.top+=a,e.left+=r,t.offset.setOffset(o[0],t.extend({using:function(t){o.css({top:Math.round(t.top),left:Math.round(t.left)})}},e),0),o.addClass("in");var l=o[0].offsetWidth,h=o[0].offsetHeight;"top"==i&&h!=s&&(e.top=e.top+s-h);var d=this.getViewportAdjustedDelta(i,e,l,h);d.left?e.left+=d.left:e.top+=d.top;var 
p=/top|bottom/.test(i),c=p?2*d.left-n+l:2*d.top-s+h,f=p?"offsetWidth":"offsetHeight";o.offset(e),this.replaceArrow(c,o[0][f],p)},i.prototype.replaceArrow=function(t,e,i){this.arrow().css(i?"left":"top",50*(1-t/e)+"%").css(i?"top":"left","")},i.prototype.setContent=function(){var t=this.tip(),e=this.getTitle();t.find(".tooltip-inner")[this.options.html?"html":"text"](e),t.removeClass("fade in top bottom left right")},i.prototype.hide=function(e){function o(){"in"!=n.hoverState&&s.detach(),n.$element&&n.$element.removeAttr("aria-describedby").trigger("hidden.bs."+n.type),e&&e()}var n=this,s=t(this.$tip),a=t.Event("hide.bs."+this.type);return this.$element.trigger(a),a.isDefaultPrevented()?void 0:(s.removeClass("in"),t.support.transition&&s.hasClass("fade")?s.one("bsTransitionEnd",o).emulateTransitionEnd(i.TRANSITION_DURATION):o(),this.hoverState=null,this)},i.prototype.fixTitle=function(){var t=this.$element;(t.attr("title")||"string"!=typeof t.attr("data-original-title"))&&t.attr("data-original-title",t.attr("title")||"").attr("title","")},i.prototype.hasContent=function(){return this.getTitle()},i.prototype.getPosition=function(e){e=e||this.$element;var i=e[0],o="BODY"==i.tagName,n=i.getBoundingClientRect();null==n.width&&(n=t.extend({},n,{width:n.right-n.left,height:n.bottom-n.top}));var s=window.SVGElement&&i instanceof window.SVGElement,a=o?{top:0,left:0}:s?null:e.offset(),r={scroll:o?document.documentElement.scrollTop||document.body.scrollTop:e.scrollTop()},l=o?{width:t(window).width(),height:t(window).height()}:null;return t.extend({},n,r,l,a)},i.prototype.getCalculatedOffset=function(t,e,i,o){return"bottom"==t?{top:e.top+e.height,left:e.left+e.width/2-i/2}:"top"==t?{top:e.top-o,left:e.left+e.width/2-i/2}:"left"==t?{top:e.top+e.height/2-o/2,left:e.left-i}:{top:e.top+e.height/2-o/2,left:e.left+e.width}},i.prototype.getViewportAdjustedDelta=function(t,e,i,o){var n={top:0,left:0};if(!this.$viewport)return n;var 
s=this.options.viewport&&this.options.viewport.padding||0,a=this.getPosition(this.$viewport);if(/right|left/.test(t)){var r=e.top-s-a.scroll,l=e.top+s-a.scroll+o;r<a.top?n.top=a.top-r:l>a.top+a.height&&(n.top=a.top+a.height-l)}else{var h=e.left-s,d=e.left+s+i;h<a.left?n.left=a.left-h:d>a.right&&(n.left=a.left+a.width-d)}return n},i.prototype.getTitle=function(){var t,e=this.$element,i=this.options;return t=e.attr("data-original-title")||("function"==typeof i.title?i.title.call(e[0]):i.title)},i.prototype.getUID=function(t){do t+=~~(1e6*Math.random());while(document.getElementById(t));return t},i.prototype.tip=function(){if(!this.$tip&&(this.$tip=t(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},i.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},i.prototype.enable=function(){this.enabled=!0},i.prototype.disable=function(){this.enabled=!1},i.prototype.toggleEnabled=function(){this.enabled=!this.enabled},i.prototype.toggle=function(e){var i=this;e&&(i=t(e.currentTarget).data("bs."+this.type),i||(i=new this.constructor(e.currentTarget,this.getDelegateOptions()),t(e.currentTarget).data("bs."+this.type,i))),e?(i.inState.click=!i.inState.click,i.isInStateTrue()?i.enter(i):i.leave(i)):i.tip().hasClass("in")?i.leave(i):i.enter(i)},i.prototype.destroy=function(){var t=this;clearTimeout(this.timeout),this.hide(function(){t.$element.off("."+t.type).removeData("bs."+t.type),t.$tip&&t.$tip.detach(),t.$tip=null,t.$arrow=null,t.$viewport=null,t.$element=null})};var o=t.fn.tooltip;t.fn.tooltip=e,t.fn.tooltip.Constructor=i,t.fn.tooltip.noConflict=function(){return t.fn.tooltip=o,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.popover"),s="object"==typeof e&&e;!n&&/destroy|hide/.test(e)||(n||o.data("bs.popover",n=new i(this,s)),"string"==typeof e&&n[e]())})}var 
i=function(t,e){this.init("popover",t,e)};if(!t.fn.tooltip)throw new Error("Popover requires tooltip.js");i.VERSION="3.3.7",i.DEFAULTS=t.extend({},t.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),i.prototype=t.extend({},t.fn.tooltip.Constructor.prototype),i.prototype.constructor=i,i.prototype.getDefaults=function(){return i.DEFAULTS},i.prototype.setContent=function(){var t=this.tip(),e=this.getTitle(),i=this.getContent();t.find(".popover-title")[this.options.html?"html":"text"](e),t.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof i?"html":"append":"text"](i),t.removeClass("fade top bottom left right in"),t.find(".popover-title").html()||t.find(".popover-title").hide()},i.prototype.hasContent=function(){return this.getTitle()||this.getContent()},i.prototype.getContent=function(){var t=this.$element,e=this.options;return t.attr("data-content")||("function"==typeof e.content?e.content.call(t[0]):e.content)},i.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var o=t.fn.popover;t.fn.popover=e,t.fn.popover.Constructor=i,t.fn.popover.noConflict=function(){return t.fn.popover=o,this}}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.tab");n||o.data("bs.tab",n=new i(this)),"string"==typeof e&&n[e]()})}var i=function(e){this.element=t(e)};i.VERSION="3.3.7",i.TRANSITION_DURATION=150,i.prototype.show=function(){var e=this.element,i=e.closest("ul:not(.dropdown-menu)"),o=e.data("target");if(o||(o=e.attr("href"),o=o&&o.replace(/.*(?=#[^\s]*$)/,"")),!e.parent("li").hasClass("active")){var n=i.find(".active:last 
a"),s=t.Event("hide.bs.tab",{relatedTarget:e[0]}),a=t.Event("show.bs.tab",{relatedTarget:n[0]});if(n.trigger(s),e.trigger(a),!a.isDefaultPrevented()&&!s.isDefaultPrevented()){var r=t(o);this.activate(e.closest("li"),i),this.activate(r,r.parent(),function(){n.trigger({type:"hidden.bs.tab",relatedTarget:e[0]}),e.trigger({type:"shown.bs.tab",relatedTarget:n[0]})})}}},i.prototype.activate=function(e,o,n){function s(){a.removeClass("active").find("> .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),e.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),r?(e[0].offsetWidth,e.addClass("in")):e.removeClass("fade"),e.parent(".dropdown-menu").length&&e.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),n&&n()}var a=o.find("> .active"),r=n&&t.support.transition&&(a.length&&a.hasClass("fade")||!!o.find("> .fade").length);a.length&&r?a.one("bsTransitionEnd",s).emulateTransitionEnd(i.TRANSITION_DURATION):s(),a.removeClass("in")};var o=t.fn.tab;t.fn.tab=e,t.fn.tab.Constructor=i,t.fn.tab.noConflict=function(){return t.fn.tab=o,this};var n=function(i){i.preventDefault(),e.call(t(this),"show")};t(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',n).on("click.bs.tab.data-api",'[data-toggle="pill"]',n)}(jQuery),+function(t){"use strict";function e(e){return this.each(function(){var o=t(this),n=o.data("bs.affix"),s="object"==typeof e&&e;n||o.data("bs.affix",n=new i(this,s)),"string"==typeof e&&n[e]()})}var i=function(e,o){this.options=t.extend({},i.DEFAULTS,o),this.$target=t(this.options.target).on("scroll.bs.affix.data-api",t.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",t.proxy(this.checkPositionWithEventLoop,this)),this.$element=t(e),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};i.VERSION="3.3.7",i.RESET="affix affix-top 
affix-bottom",i.DEFAULTS={offset:0,target:window},i.prototype.getState=function(t,e,i,o){var n=this.$target.scrollTop(),s=this.$element.offset(),a=this.$target.height();if(null!=i&&"top"==this.affixed)return i>n?"top":!1;if("bottom"==this.affixed)return null!=i?n+this.unpin<=s.top?!1:"bottom":t-o>=n+a?!1:"bottom";var r=null==this.affixed,l=r?n:s.top,h=r?a:e;return null!=i&&i>=n?"top":null!=o&&l+h>=t-o?"bottom":!1},i.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(i.RESET).addClass("affix");var t=this.$target.scrollTop(),e=this.$element.offset();return this.pinnedOffset=e.top-t},i.prototype.checkPositionWithEventLoop=function(){setTimeout(t.proxy(this.checkPosition,this),1)},i.prototype.checkPosition=function(){if(this.$element.is(":visible")){var e=this.$element.height(),o=this.options.offset,n=o.top,s=o.bottom,a=Math.max(t(document).height(),t(document.body).height());"object"!=typeof o&&(s=n=o),"function"==typeof n&&(n=o.top(this.$element)),"function"==typeof s&&(s=o.bottom(this.$element));var r=this.getState(a,e,n,s);if(this.affixed!=r){null!=this.unpin&&this.$element.css("top","");var l="affix"+(r?"-"+r:""),h=t.Event(l+".bs.affix");if(this.$element.trigger(h),h.isDefaultPrevented())return;this.affixed=r,this.unpin="bottom"==r?this.getPinnedOffset():null,this.$element.removeClass(i.RESET).addClass(l).trigger(l.replace("affix","affixed")+".bs.affix")}"bottom"==r&&this.$element.offset({top:a-e-s})}};var o=t.fn.affix;t.fn.affix=e,t.fn.affix.Constructor=i,t.fn.affix.noConflict=function(){return t.fn.affix=o,this},t(window).on("load",function(){t('[data-spy="affix"]').each(function(){var i=t(this),o=i.data();o.offset=o.offset||{},null!=o.offsetBottom&&(o.offset.bottom=o.offsetBottom),null!=o.offsetTop&&(o.offset.top=o.offsetTop),e.call(i,o)})})}(jQuery),+function(t){"use strict";function e(e){var i,o=e.attr("data-target")||(i=e.attr("href"))&&i.replace(/.*(?=#[^\s]+$)/,"");return t(o)}function i(e){return 
this.each(function(){var i=t(this),n=i.data("bs.collapse"),s=t.extend({},o.DEFAULTS,i.data(),"object"==typeof e&&e);!n&&s.toggle&&/show|hide/.test(e)&&(s.toggle=!1),n||i.data("bs.collapse",n=new o(this,s)),"string"==typeof e&&n[e]()})}var o=function(e,i){this.$element=t(e),this.options=t.extend({},o.DEFAULTS,i),this.$trigger=t('[data-toggle="collapse"][href="#'+e.id+'"],[data-toggle="collapse"][data-target="#'+e.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};o.VERSION="3.3.7",o.TRANSITION_DURATION=350,o.DEFAULTS={toggle:!0},o.prototype.dimension=function(){var t=this.$element.hasClass("width");return t?"width":"height"},o.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var e,n=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(n&&n.length&&(e=n.data("bs.collapse"),e&&e.transitioning))){var s=t.Event("show.bs.collapse");if(this.$element.trigger(s),!s.isDefaultPrevented()){n&&n.length&&(i.call(n,"hide"),e||n.data("bs.collapse",null));var a=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[a](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var r=function(){this.$element.removeClass("collapsing").addClass("collapse in")[a](""),this.transitioning=0,
this.$element.trigger("shown.bs.collapse")};if(!t.support.transition)return r.call(this);var l=t.camelCase(["scroll",a].join("-"));this.$element.one("bsTransitionEnd",t.proxy(r,this)).emulateTransitionEnd(o.TRANSITION_DURATION)[a](this.$element[0][l])}}}},o.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var e=t.Event("hide.bs.collapse");if(this.$element.trigger(e),!e.isDefaultPrevented()){var i=this.dimension();this.$element[i](this.$element[i]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var n=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return t.support.transition?void this.$element[i](0).one("bsTransitionEnd",t.proxy(n,this)).emulateTransitionEnd(o.TRANSITION_DURATION):n.call(this)}}},o.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},o.prototype.getParent=function(){return t(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(t.proxy(function(i,o){var n=t(o);this.addAriaAndCollapsedClass(e(n),n)},this)).end()},o.prototype.addAriaAndCollapsedClass=function(t,e){var i=t.hasClass("in");t.attr("aria-expanded",i),e.toggleClass("collapsed",!i).attr("aria-expanded",i)};var n=t.fn.collapse;t.fn.collapse=i,t.fn.collapse.Constructor=o,t.fn.collapse.noConflict=function(){return t.fn.collapse=n,this},t(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(o){var n=t(this);n.attr("data-target")||o.preventDefault();var s=e(n),a=s.data("bs.collapse"),r=a?"toggle":n.data();i.call(s,r)})}(jQuery),+function(t){"use strict";function e(i,o){this.$body=t(document.body),this.$scrollElement=t(t(i).is(document.body)?window:i),this.options=t.extend({},e.DEFAULTS,o),this.selector=(this.options.target||"")+" .nav li > 
a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",t.proxy(this.process,this)),this.refresh(),this.process()}function i(i){return this.each(function(){var o=t(this),n=o.data("bs.scrollspy"),s="object"==typeof i&&i;n||o.data("bs.scrollspy",n=new e(this,s)),"string"==typeof i&&n[i]()})}e.VERSION="3.3.7",e.DEFAULTS={offset:10},e.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},e.prototype.refresh=function(){var e=this,i="offset",o=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),t.isWindow(this.$scrollElement[0])||(i="position",o=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var e=t(this),n=e.data("target")||e.attr("href"),s=/^#./.test(n)&&t(n);return s&&s.length&&s.is(":visible")&&[[s[i]().top+o,n]]||null}).sort(function(t,e){return t[0]-e[0]}).each(function(){e.offsets.push(this[0]),e.targets.push(this[1])})},e.prototype.process=function(){var t,e=this.$scrollElement.scrollTop()+this.options.offset,i=this.getScrollHeight(),o=this.options.offset+i-this.$scrollElement.height(),n=this.offsets,s=this.targets,a=this.activeTarget;if(this.scrollHeight!=i&&this.refresh(),e>=o)return a!=(t=s[s.length-1])&&this.activate(t);if(a&&e<n[0])return this.activeTarget=null,this.clear();for(t=n.length;t--;)a!=s[t]&&e>=n[t]&&(void 0===n[t+1]||e<n[t+1])&&this.activate(s[t])},e.prototype.activate=function(e){this.activeTarget=e,this.clear();var i=this.selector+'[data-target="'+e+'"],'+this.selector+'[href="'+e+'"]',o=t(i).parents("li").addClass("active");o.parent(".dropdown-menu").length&&(o=o.closest("li.dropdown").addClass("active")),o.trigger("activate.bs.scrollspy")},e.prototype.clear=function(){t(this.selector).parentsUntil(this.options.target,".active").removeClass("active")};var 
o=t.fn.scrollspy;t.fn.scrollspy=i,t.fn.scrollspy.Constructor=e,t.fn.scrollspy.noConflict=function(){return t.fn.scrollspy=o,this},t(window).on("load.bs.scrollspy.data-api",function(){t('[data-spy="scroll"]').each(function(){var e=t(this);i.call(e,e.data())})})}(jQuery),+function(t){"use strict";function e(){var t=document.createElement("bootstrap"),e={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var i in e)if(void 0!==t.style[i])return{end:e[i]};return!1}t.fn.emulateTransitionEnd=function(e){var i=!1,o=this;t(this).one("bsTransitionEnd",function(){i=!0});var n=function(){i||t(o).trigger(t.support.transition.end)};return setTimeout(n,e),this},t(function(){t.support.transition=e(),t.support.transition&&(t.event.special.bsTransitionEnd={bindType:t.support.transition.end,delegateType:t.support.transition.end,handle:function(e){return t(e.target).is(this)?e.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery);
|
# -*- coding:UTF-8 -*-
# @Time: 2019/8/25 17:01
# @Author: wyd
# @File: day09
list3 = [
    {'name': 'admin', 'hobby': '抽烟'},
    {'name': 'admin', 'hobby': '喝酒'},
    {'name': 'admin', 'hobby': '烫头'},
    {'name': 'admin', 'hobby': 'Massage'},
    {'name': 'root', 'hobby': '喊麦'},
    {'name': 'root', 'hobby': '街舞'},
    {'name': 'taibai', 'hobby': '开车'},
    {'name': 'taibai', 'hobby': '嫂子'},
]

# Group hobbies by person name, keeping the order in which each name first
# appears (dicts preserve insertion order in Python 3.7+).
_grouped = {}
for _person in list3:
    _entry = _grouped.setdefault(
        _person['name'], {'name': _person['name'], 'hobby_list': []}
    )
    _entry['hobby_list'].append(_person['hobby'])

list4 = list(_grouped.values())
print(list4)
|
/* Khmer (km) localisation for the jQuery UI datepicker. Registers the
   locale table and makes it the default. */
jQuery(function (a) {
    a.datepicker.regional.km = {
        closeText: "ធ្វើរួច",
        prevText: "មុន",
        nextText: "បន្ទាប់",
        currentText: "ថ្ងៃនេះ",
        monthNames: ["មករា", "កុម្ភៈ", "មីនា", "មេសា", "ឧសភា", "មិថុនា",
            "កក្កដា", "សីហា", "កញ្ញា", "តុលា", "វិច្ឆិកា", "ធ្នូ"],
        monthNamesShort: ["មករា", "កុម្ភៈ", "មីនា", "មេសា", "ឧសភា", "មិថុនា",
            "កក្កដា", "សីហា", "កញ្ញា", "តុលា", "វិច្ឆិកា", "ធ្នូ"],
        dayNames: ["អាទិត្យ", "ចន្ទ", "អង្គារ", "ពុធ", "ព្រហស្បតិ៍", "សុក្រ", "សៅរ៍"],
        dayNamesShort: ["អា", "ច", "អ", "ពុ", "ព្រហ", "សុ", "សៅ"],
        dayNamesMin: ["អា", "ច", "អ", "ពុ", "ព្រហ", "សុ", "សៅ"],
        weekHeader: "សប្ដាហ៍",
        dateFormat: "dd-mm-yy",
        firstDay: 1,
        isRTL: false,
        showMonthAfterYear: false,
        yearSuffix: ""
    };
    a.datepicker.setDefaults(a.datepicker.regional.km);
});
|
/**
 * Front End Interview Questions: map() vs filter() vs reduce()
 *
 * Q: What do map(), filter() and reduce() do?
 * A: map transforms each element, filter keeps matching elements, and
 *    reduce folds the array down to a single accumulated value. None of
 *    them mutate the source array.
 */
const nums = [1, 2, 3, 4, 5];

// map: new array with every element incremented.
const numsAddOne = nums.map(value => value + 1);
console.log(numsAddOne) // [2, 3, 4, 5, 6]

// filter: new array containing only the even values.
const evenNums = nums.filter(value => value % 2 === 0);
console.log(evenNums) // [2, 4]
console.log(nums) // [1, 2, 3, 4, 5]

// reduce: fold the values into their sum, starting from 0.
const sum = nums.reduce((total, current) => total + current, 0)
console.log(sum); // 15
|
# pylint: disable=eval-used
# pylint: disable=unused-import
import os
import numpy as np
import plotly.graph_objects as go
import pytest
from scipy.spatial.transform import Rotation as R
import magpylib as magpy
from magpylib._src.display.base_traces import make_Prism
# Use the plotly backend for all magpylib display calls in this test module.
magpy.defaults.display.backend = "plotly"
def make_wheel(Ncubes=6, height=10, diameter=36, path_len=5, label=None):
    """Create a basic Collection Compound object with a rotary arrangement
    of cuboid magnets.

    Args:
        Ncubes: number of cuboid magnets placed around the wheel.
        height: edge length of each cuboid (also drives the prism height).
        diameter: diameter of the circle the cuboids sit on.
        path_len: number of steps in the rotation/translation path.
        label: optional style label for the returned Collection.

    Returns:
        A magpylib Collection with an extra translucent prism 3D model.
    """

    def _make_cuboid():
        # One cuboid magnet sitting on the wheel radius. A def instead of a
        # lambda assigned to a name (PEP 8 E731) — behavior is unchanged.
        return magpy.magnet.Cuboid(
            (1, 0, 0), dimension=[height] * 3, position=(diameter / 2, 0, 0)
        )

    # Rotate one prototype around z to obtain the position/orientation of
    # every cuboid in the ring.
    s0 = _make_cuboid().rotate_from_angax(
        np.linspace(0.0, 360.0, Ncubes, endpoint=False), "z", anchor=(0, 0, 0), start=0
    )
    c = magpy.Collection()
    for ind in range(Ncubes):
        s = _make_cuboid()
        s.position = s0.position[ind]
        s.orientation = s0.orientation[ind]
        c.add(s)
    # Tilt the wheel, then sweep it along a rotation + translation path.
    c.rotate_from_angax(90, "x")
    c.rotate_from_angax(
        np.linspace(90, 360, path_len), axis="z", start=0, anchor=(80, 0, 0)
    )
    c.move(np.linspace((0, 0, 0), (0, 0, 200), path_len), start=0)
    c.style.label = label
    # Attach a translucent prism as a user-defined extra 3D model.
    trace = make_Prism(
        "plotly",
        base=Ncubes,
        diameter=diameter + height * 2,
        height=height * 0.5,
        opacity=0.5,
        color="blue",
    )
    c.style.model3d.data = [trace]
    return c
def create_compound_set(show=False, **kwargs):
    """creates a styled Collection Compound object with a rotary arrangement of cuboid magnets.
    A copy is created to show the difference when applying position and/or orientation setters over
    kwargs.

    Args:
        show: when True, render both wheels side by side in a plotly figure.
        **kwargs: attribute-name -> *string* expression; each value is passed
            through eval() and assigned onto the "after" wheel ``c1``.
    """
    c1 = make_wheel(label="Magnetic Wheel after")
    c1.set_children_styles(
        path_show=False,
        magnetization_color_north="magenta",
        magnetization_color_south="cyan",
    )
    # Second, faded copy kept untouched for visual before/after comparison.
    c2 = make_wheel(label="Magnetic Wheel before")
    c2.style.model3d.data[0].kwargs["color"] = "red"
    c2.style.model3d.data[0].kwargs["opacity"] = 0.1
    c2.set_children_styles(path_show=False, opacity=0.1)
    for k, v in kwargs.items():
        # NOTE(review): values are strings evaluated with eval() (see the
        # module-level pylint disable). Acceptable only because this is a
        # test helper fed hard-coded inputs — never pass untrusted data.
        setattr(c1, k, eval(v))
    if show:
        fig = go.Figure()
        magpy.show(c2, c1, style_path_frames=1, canvas=fig)
        fig.layout.title = ", ".join(f"c1.{k} = {v}" for k, v in kwargs.items())
        fig.show()
    return c1
def get_pos_orient_from_collection(coll):
    """Return (position, orientation.as_matrix()) tuples for the collection
    itself followed by each of its children."""
    return [
        (obj.position, obj.orientation.as_matrix())
        for obj in [coll, *coll.children]
    ]
def create_compound_test_data(path=None):
    """creates tests data for compound setters testing

    Builds a list of (test-name, setter-kwargs) cases, applies each to a
    fresh compound wheel, and records the resulting positions/orientations
    as the expected values for test_compound_setters.

    Args:
        path: when given, the data dict is saved there with np.save and
            nothing is returned; when None the dict is returned instead.
    """
    # Each entry: (human-readable test id, kwargs of attribute -> string
    # expression later eval'd by create_compound_set).
    setters = [
        ("orientation=None", dict(orientation="None")),
        ("shorter position path", dict(position="np.array([[50, 0, 100]] * 2)")),
        (
            "shorter orientation path",
            dict(orientation="R.from_rotvec([[90,0,0],[0,90,0]], degrees=True)"),
        ),
        (
            "longer position path",
            dict(position="np.array(np.linspace((280.,0.,0), (280.,0.,300), 8))"),
        ),
        (
            "longer orientation path",
            dict(
                orientation="R.from_rotvec([[0,90*i,0] for i in range(6)], degrees=True)"
            ),
        ),
    ]
    data = {"test_names": [], "setters_inputs": [], "pos_orient_as_matrix_expected": []}
    for setter in setters:
        tname, kwargs = setter
        coll = create_compound_set(**kwargs)
        pos_orient = get_pos_orient_from_collection(coll)
        data["test_names"].append(tname)
        data["setters_inputs"].append(kwargs)
        data["pos_orient_as_matrix_expected"].append(pos_orient)
    if path is None:
        return data
    np.save(path, data)
def display_compound_test_data(path):
    """Load saved compound setter cases from ``path`` and render each one."""
    saved = np.load(path, allow_pickle=True).item()
    for setter_kwargs in saved["setters_inputs"]:
        create_compound_set(show=True, **setter_kwargs)
# Pre-generated expected-results file; uncomment the call below to
# regenerate it after an intentional behavior change.
folder = "tests/testdata"
file = os.path.join(folder, "testdata_compound_setter_cases.npy")
# create_compound_test_data(file)
COMPOUND_DATA = np.load(file, allow_pickle=True).item()
# One parametrized case per saved setter scenario; ids come from the
# human-readable names stored alongside the expected data.
@pytest.mark.parametrize(
    "setters_inputs, pos_orient_as_matrix_expected",
    [
        (si, po)
        for si, po in zip(
            COMPOUND_DATA["setters_inputs"],
            COMPOUND_DATA["pos_orient_as_matrix_expected"],
        )
    ],
    ids=COMPOUND_DATA["test_names"],
)
def test_compound_setters(setters_inputs, pos_orient_as_matrix_expected):
    """testing of compound object setters and the effects on its children."""
    c1 = create_compound_set(**setters_inputs)
    pos_orient = get_pos_orient_from_collection(c1)
    for ind, (po, po_exp) in enumerate(zip(pos_orient, pos_orient_as_matrix_expected)):
        obj_str = "child"
        if ind == 0:  # first ind is (position, orientation.as_matrix()) of collection
            obj_str = "Collection"
        pos, orient = po
        pos_exp, orient_exp = po_exp
        err_msg = f"{obj_str} position matching failed"
        np.testing.assert_almost_equal(pos, pos_exp, err_msg=err_msg)
        err_msg = f"{obj_str}{ind if ind!=0 else ''} orientation matching failed"
        np.testing.assert_almost_equal(orient, orient_exp, err_msg=err_msg)
|
import re
import codecs
def extractTitle(text):
    """Extract the title block from PICES scientific report text.

    Looks for a "PICES SCIENTIFIC REPORT No. <n>, <year> <title...>" header
    and returns the matched lines (each terminated by a newline), stopping
    at the first short line. Returns '' when no header is found.
    """
    # Drop noise lines of length <= 1 before matching.
    cleaned = "".join(line + "\n" for line in text.split('\n') if len(line) > 1)
    # BUGFIX: the original class used [a-zA-z], which also matched the
    # characters [ \ ] ^ _ ` — corrected to [a-zA-Z].
    title = re.search(
        r'PICES SCIENTIFIC REPORT(\s*)No\.(\s*)([0-9]+),?(\s*)[0-9]{4}'
        r'([a-zA-Z0-9:\.,\-\s/\\()]+)( \n)*',
        cleaned,
        re.IGNORECASE | re.UNICODE,
    )
    if title is None:
        return ''
    # Keep consecutive lines longer than 2 chars; stop at the first short one.
    parts = []
    for line in title.group(0).split('\n'):
        if len(line) > 2:
            parts.append(line + '\n')
        else:
            break
    return ''.join(parts)
# pdf = PDFContainer(format="text", codec='utf-8')
# pdf.convertPDF("53.pdf")
# txt = pdf.getPage(0)
# txts = txt.split('\n')
#
# TXTfile = codecs.open("out1.txt", "w", "utf-8")
# TXTfile.write(txt)
# TXTfile.close()
#txt = ''
#with codecs.open("out1.txt", "r", "utf-8") as f:
# txt = f.read()
#
#title = re.search(r'PICES SCIENTIFIC REPORT(\s*)No\.(\s*)([0-9]+),(\s*)[0-9]{4}([a-zA-z0-9\.,\-\s]*)ISBN', txt) .group(0)
# isTitle = False
# isYear = False
# isNo = False
# isStart = False
# title = ''
# for t in txts:
# if not isStart and re.match(r'PICES SCIENTIFIC REPORT', t) == None:
# isStart = True
# continue
# _re = re.match(r'\s*', t)
# if _re == None:
# if isTitle:
# break
# else:
# continue
#
#
# _re = re.search(r'[0-9]{4}', t)
# if _re != None:
# title += 'Year ' + _re.group(0) + '\n'
# isYear = True
#
# _re = re.search(r'No\.(\s*)[0-9]{1:}', t)
# if _re != None:
# title += _re.group(0) + '\n'
# isNo = True
#
# if not isYear or not isNo:
# continue
#
# _re0 = re.search(r'ISBN', t)
# _re1 = re.search(r'ISSN', t)
# if _re0 != None or _re1 != None:
# break
# else:
# isTitle = True
# title += t
#print(title)
|
import machine
import utime
import vl53l0x

# I2C pin assignment. const() is a MicroPython optimization that inlines
# the value at compile time.
SDA_PIN = const(4)
SCL_PIN = const(5)

# define software I2C bus (needed for ESP8266).
# alternatively hardware I2C bus (ESP32 only) can be used by passing 0 or 1 to
# constructor, i.e.: i2c = I2C(0, scl=Pin(5), sda=Pin(4), freq=100000)
# any input pins can be defined for the i2c interface
i2c = machine.SoftI2C(scl=machine.Pin(SCL_PIN), sda=machine.Pin(SDA_PIN), freq=100000)

# create sensor object and begin continuous ranging
sensor = vl53l0x.VL53L0X(i2c)
sensor.start()

print("* VL43L0X Distance *")

# Poll the sensor twice a second forever, overwriting the same console
# line (end='\r').
while True:
    # read sensor value
    distance = sensor.read()
    # print readings to console
    # {} is used in conjunction with format() for substitution.
    # .1f - format to 1 decimal places.
    print("Distance: {:.1f} mm".format(distance), end='\r')
    utime.sleep_ms(500)
|
'use strict';
/**
* Module dependencies.
*/
var taxesPolicy = require('../policies/taxes.server.policy'),
taxes = require('../controllers/taxes.server.controller');
module.exports = function (app) {
// Taxes collection routes
app.route('/api/taxes').all(taxesPolicy.isAllowed)
.get(taxes.list)
.post(taxes.create);
// Single taxe routes
app.route('/api/taxes/:taxesId').all(taxesPolicy.isAllowed)
.get(taxes.read)
.put(taxes.update)
.delete(taxes.delete);
// Finish by binding the taxe middleware
app.param('taxesId', taxes.taxeByID);
};
|
#ifndef QTIPCSERVER_H
#define QTIPCSERVER_H

// Define AMITY-Qt message queue name
#define BITCOINURI_QUEUE_NAME "AMITYURI"

// NOTE(review): presumably scans argv for payment URIs and relays them to a
// running instance via the IPC queue above — confirm in the implementation.
void ipcScanRelay(int argc, char *argv[]);
// Initialize the IPC server side for this process.
void ipcInit(int argc, char *argv[]);

#endif // QTIPCSERVER_H
|
'use strict';
class Model {
  /**
   * Generic CRUD wrapper around a mongoose-style schema/model.
   * @param {Object} schema mongo schema (mongoose model constructor)
   */
  constructor(schema) {
    this.schema = schema;
  }
  /**
   * Read one record by id, or all records when no id is given.
   * @param {String} _id optional for mongo record id
   * @return {*} return record by id if _id !empty else will get all records
   */
  read(_id) {
    // An empty filter ({}) matches every document.
    const queryParam = _id ? {
      _id,
    } : {};
    return this.schema.find(queryParam);
  }
  /**
   * Create and persist a new record.
   * @param {Object} record must match schema format
   * @return {*} promise for the saved record
   */
  create(record) {
    const queryRecord = this.schema(record);
    return queryRecord.save(record);
  }
  /**
   * Update a record by id; {new: true} resolves to the post-update document.
   * @param {String} _id mongo record id
   * @param {Object} record must match schema format
   * @return {*}
   */
  update(_id, record) {
    return this.schema.findByIdAndUpdate(_id, record, {
      new: true,
    });
  }
  /**
   * Partially update a record by id. The original body was an exact copy of
   * update(), so it now delegates (DRY) — kept as a separate method for the
   * PATCH-vs-PUT route distinction.
   * @param {String} _id mongo record id
   * @param {Object} record must match schema format
   * @return {*}
   */
  patch(_id, record) {
    return this.update(_id, record);
  }
  /**
   * Delete a record by id.
   * @param {String} _id mongo record id
   * @return {*}
   */
  delete(_id) {
    return this.schema.findByIdAndDelete(_id);
  }
}
module.exports = Model;
|
from . whie_button import WhiteButton
from app.packages.pyside_or_pyqt import *
class PomoButton(WhiteButton):
    """A WhiteButton that toggles between its original icon and a stop icon
    (start/stop control)."""

    def __init__(self, parent, name, icon, width, height):
        WhiteButton.__init__(self, parent, name, icon, width, height)
        # True while the original icon is shown; False selects the stop icon
        # on the next paint (see icon_paint).
        self.is_original = True
        self.icon_original = icon
        self.icon_stop = "images/icons_svg/stop.png"

    def mousePressEvent(self, event):
        """Emit the clicked signal and take focus on a left-button press."""
        if event.button() == Qt.LeftButton:
            # EMIT SIGNAL
            self.clicked.emit()
            # SET FOCUS
            self.setFocus()
            self.repaint()

    def icon_paint(self, qp, image, rect):
        """Paint the current icon (original or stop) centered in ``rect``,
        tinted with self._set_icon_color via SourceIn composition.

        NOTE(review): the incoming ``image`` argument is immediately
        overwritten by the is_original selection below — confirm callers do
        not rely on it being honored.
        """
        # if button active => active image
        image = self.icon_original if self.is_original else self.icon_stop
        icon = QPixmap(image)
        painter = QPainter(icon)
        painter.setCompositionMode(QPainter.CompositionMode_SourceIn)
        painter.fillRect(icon.rect(), self._set_icon_color)
        qp.drawPixmap(
            (rect.width() - icon.width()) / 2,
            (rect.height() - icon.height()) / 2,
            icon
        )
        painter.end()

    def stop_icon(self):
        """Switch to the stop icon (takes effect on the next repaint)."""
        self.is_original = False

    def original_icon(self):
        """Switch back to the original icon and repaint immediately."""
        self.is_original = True
        self.repaint()
|
/*
* Arm SCP/MCP Software
* Copyright (c) 2017-2018, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*
* Description:
* Software defined memory map shared between SCP and AP cores.
*/
#ifndef SOFTWARE_MMAP_H
#define SOFTWARE_MMAP_H
#include <fwk_macros.h>
#include <system_mmap.h>
/*
* The 4KiB AP/SCP Shared memory at the base of Trusted SRAM is used for several
* purposes. These are: the Shared Data Storage (SDS) Memory Region, the SCMI
* secure payload areas, and the context area for Application Processor
* firmware.
*
* Shared Data Storage (SDS) Memory Region: Used for structured storage of data
* that is shared between SCP Firmware and Application Processor firmware. The
* SDS Memory Region occupies the area between the context region base and
* the SCMI Secure Payload base.
*
* SCMI Secure Payload Areas: Storage for SCMI message contents in both the
* Agent->Platform and Platform->Agent directions.
*
* Application Processor Context Area: The usage of this area is defined by the
* firmware running on the Application Processors. The SCP Firmware must zero
* this memory before releasing any Application Processors. This area must
* always be located in the top 64 bytes of the 4KiB reserved region.
*
* +-----------------------+ 4096
* | |
* 64B | AP Context Area |
* | |
* +-----------------------+
* | |
* 256B | Unused |
* | |
* +-----------------------+
* | |
* | SCMI Sec. Payload |
* 128B | Platform to Agent |
* | |
* +-----------------------+
* | |
* 128B | SCMI Sec. Payload |
* | Agent to Platform |
* | |
* +-----------------------+
* | |
* 3520B | SDS Memory Region |
* | |
* +-----------------------+ 0
*/
/* Secure shared memory at the base of Trusted SRAM */
#define SHARED_SECURE_BASE (TRUSTED_RAM_BASE)
#define SHARED_SECURE_SIZE (4 * FWK_KIB)

/* SDS Memory Region (bottom of the shared region; see layout above) */
#define SDS_MEM_BASE (SHARED_SECURE_BASE)
#define SDS_MEM_SIZE (3520)

/* AP Context Area (always the top 64 bytes of the 4KiB region) */
/* NOTE: AP_CONTEXT_SIZE is referenced before its own #define below; this is
 * valid C because object-like macros expand at the point of use. */
#define AP_CONTEXT_BASE (SHARED_SECURE_BASE + SHARED_SECURE_SIZE - \
                            AP_CONTEXT_SIZE)
#define AP_CONTEXT_SIZE (64)

/* SCMI Secure Payload Areas, stacked directly above the SDS region */
#define SCMI_PAYLOAD_SIZE (128)
#define SCMI_PAYLOAD_S_A2P_BASE (SDS_MEM_BASE + SDS_MEM_SIZE)
#define SCMI_PAYLOAD_S_P2A_BASE (SCMI_PAYLOAD_S_A2P_BASE + SCMI_PAYLOAD_SIZE)
/*
* The 4KiB AP/SCP Shared memory at the base of Non-trusted SRAM is used for the
* SCMI non-secure payload areas.
*
* Two SCMI non-Secure Payload Areas: Storage for SCMI message contents in both
* the Agent->Platform and Platform->Agent directions.
*
* +-----------------------+ 4096
* 3584B | Unused |
* +-----------------------+
* | |
* | Non-Sec. Channel 1 |
* | SCMI non-Sec. Payload |
* 128B | Platform to Agent |
* | |
* +-----------------------+
* | |
* | Non-Sec. Channel 1 |
* 128B | SCMI non-Sec. Payload |
* | Agent to Platform |
* | |
* +-----------------------+
* | |
* | Non-Sec. Channel 0 |
* | SCMI non-Sec. Payload |
* 128B | Platform to Agent |
* | |
* +-----------------------+
* | |
* | Non-Sec. Channel 0 |
* 128B | SCMI non-Sec. Payload |
* | Agent to Platform |
* | |
* +-----------------------+ 0
*/
/* Non-secure shared memory at the base of Non-trusted SRAM */
#define SHARED_NONSECURE_BASE (NONTRUSTED_RAM_BASE)
#define SHARED_NONSECURE_SIZE (4 * FWK_KIB)

/* SCMI Non-Secure Payload Areas: channel 0 then channel 1, each an
 * A2P/P2A pair of SCMI_PAYLOAD_SIZE bytes packed back-to-back from the
 * bottom of the region (see layout above). */
#define SCMI_PAYLOAD0_NS_A2P_BASE (SHARED_NONSECURE_BASE)
#define SCMI_PAYLOAD0_NS_P2A_BASE (SCMI_PAYLOAD0_NS_A2P_BASE + \
                                      SCMI_PAYLOAD_SIZE)
#define SCMI_PAYLOAD1_NS_A2P_BASE (SCMI_PAYLOAD0_NS_P2A_BASE + \
                                      SCMI_PAYLOAD_SIZE)
#define SCMI_PAYLOAD1_NS_P2A_BASE (SCMI_PAYLOAD1_NS_A2P_BASE + \
                                      SCMI_PAYLOAD_SIZE)

#endif /* SOFTWARE_MMAP_H */
|
// Copyright (c) 2000-2005 Quadralay Corporation. All rights reserved.
//
function WWHPopupFormat_Translate(ParamText)
{
  // Hook for localizing/escaping popup text; the default implementation is
  // a pass-through.
  return ParamText;
}
function WWHPopupFormat_Format(ParamWidth,
                               ParamTextID,
                               ParamText)
{
  // Render the popup body as a bordered table. ParamWidth is accepted for
  // interface compatibility but is not used, exactly as before.
  var Popup = WWHFrame.WWHHelp.mSettings.mPopup;
  var BackgroundColor = Popup.mBackgroundColor;
  var BorderColor = Popup.mBorderColor;
  var ImageDir = WWHFrame.WWHHelp.mHelpURLPrefix + "wwhelp/wwhimpl/common/images";
  var ReqSpacer1w2h = "<img src=\"" + ImageDir + "/spc1w2h.gif\" width=\"1\" height=\"2\">";
  var ReqSpacer2w1h = "<img src=\"" + ImageDir + "/spc2w1h.gif\" width=\"2\" height=\"1\">";

  // Set style attribute to insure small image height
  var StyleAttribute = " style=\"font-size: 1px; line-height: 1px;\"";

  var Rows = [
    "<table width=\"4\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\" bgcolor=\"" + BackgroundColor + "\">",
    " <tr>",
    " <td" + StyleAttribute + " height=\"2\" colspan=\"3\" bgcolor=\"" + BorderColor + "\">" + ReqSpacer1w2h + "</td>",
    " </tr>",
    " <tr>",
    " <td bgcolor=\"" + BorderColor + "\">" + ReqSpacer2w1h + "</td>",
    " <td width=\"100%\" id=\"" + ParamTextID + "\">" + ParamText + "</td>",
    " <td bgcolor=\"" + BorderColor + "\">" + ReqSpacer2w1h + "</td>",
    " </tr>",
    " <tr>",
    " <td" + StyleAttribute + " height=\"2\" colspan=\"3\" bgcolor=\"" + BorderColor + "\">" + ReqSpacer1w2h + "</td>",
    " </tr>",
    "</table>"
  ];

  return Rows.join("");
}
|
/****************************** RMX SDK ******************************\
* Copyright (c) 2007 Vincent E. Milum Jr., All rights reserved. *
* *
* See license.txt for more information *
* *
* Latest SDK versions can be found at: http://rmx.sourceforge.net *
\***********************************************************************/
#ifndef _CFG_BOOL_H_
#define _CFG_BOOL_H_

#include "cfgBase.h"

/*
this class uses UserData to store its displayed boolean value
*/
class cfgBool : public cfgBase {
public:
// NOTE(review): parent/insert/paramlist semantics come from cfgBase —
// confirm their defaults and ownership there.
cfgBool(const char *initname, cfgBase *parent=NULL, cfgBase *insert=NULL, HPARAMLIST paramlist=NULL);
virtual ~cfgBool();
public:
// Set the displayed boolean value; `force` presumably applies the update
// even when the value is unchanged — confirm in the implementation.
void setValue(BOOL value, BOOL force=FALSE);
// Read the currently displayed boolean value.
BOOL getValue() const;
protected:
// cfgBase hook: validates the UserData parameter backing the value.
virtual void validateUserData(prmBase *userdata);
};

#endif //_CFG_BOOL_H_
|
// Grid search/refresh handler: serializes the view's form (unless explicit
// request data is supplied), POSTs to getGridData, and renders the returned
// rows plus per-row action buttons into the easyui datagrid.
// NOTE(review): actionIndex, rowIndex and event are unused here — presumably
// required by the framework's action-callback signature; confirm at the caller.
function(view, data, actionIndex, rowIndex, event) {
    if (data == null) {
        data = $(view.getId("form")).serialize();
    }
    showLoading();
    got.ajax({
        cache : true,
        type : "POST",
        url : "getGridData",
        dataType : "json",
        data : data,
        async : true,
        error : function(res, ts, e) {
            hideLoading();
            $.messager.alert('提示', "检索错误:" + ts,'error');
        },
        success : function(returnData) {
            hideLoading();
            // Bail out on an empty or unsuccessful response, preferring the
            // server-provided message when present.
            if (returnData == null || !returnData.success) {
                if (returnData != null && returnData.errorMsg) {
                    $.messager.alert('提示', returnData.errorMsg, 'info');
                } else {
                    $.messager.alert('提示', '检索错误', 'info');
                }
                return;
            }
            view.data = returnData;
            var list = returnData.data;// realResult.list;
            var opts = '';
            var listButtons = [];
            var menuButtons = [];
            // At most maxButtons actions are shown inline per row; beyond
            // that the remainder collapses into a "更多" (more) drop-down.
            var maxButtons = 5;
            var shownBtnCount = 0;
            var normalBtns = [];
            var moreBtns = [];
            var restBtns = [];
            var currentBtns = normalBtns;
            var opts; // NOTE(review): redeclares `opts` above — harmless but redundant.
            for ( var i = 0; i < list.length; i++) {
                if (view.otherActions && view.otherActions.inList) {
                    // Reset the per-row accumulators.
                    shownBtnCount = 0;
                    normalBtns = [];
                    moreBtns = [];
                    restBtns = [];
                    currentBtns = normalBtns;
                    var actionsInList = view.otherActions.inList;
                    var realShowButtonCount = 0;
                    // First pass: count the actions visible for this row
                    // (actions can be hidden by showByColumn/showValue rules).
                    for ( var j = 0; j < actionsInList.length; ++j) {
                        var act = actionsInList[j];
                        var arg = view.actionArg[act.id];
                        if (arg != null
                                && arg['showByColumn'] != null
                                && ((!got.isEmpty(list[i][arg['showByColumn']]) && list[i][arg['showByColumn']] != arg['showValue']) || (got.isEmpty(list[i][arg['showByColumn']]) && arg['showByEmpty'] != '1'))) {
                            continue;
                        }
                        ++realShowButtonCount;
                    }
                    // Second pass: emit markup, diverting overflow buttons
                    // into the "more" menu once maxButtons is reached.
                    for ( var j = 0; j < actionsInList.length; ++j) {
                        var act = actionsInList[j];
                        var arg = view.actionArg[act.id];
                        if (arg != null
                                && arg['showByColumn'] != null
                                && ((!got.isEmpty(list[i][arg['showByColumn']]) && list[i][arg['showByColumn']] != arg['showValue']) || (got.isEmpty(list[i][arg['showByColumn']]) && arg['showByEmpty'] != '1'))) {
                            continue;
                        }
                        ++shownBtnCount;
                        if (shownBtnCount == maxButtons && realShowButtonCount > maxButtons) {
                            // Drop any stale "more" menu for this row before
                            // rebuilding it.
                            $('#' + view.id + '_moreaction_' + i + '_mm').remove();
                            $(".menu-shadow").each(function() {
                                $(this).remove();
                            });
                            moreBtns.push('<a href="javascript:void(0)" id="' + view.id + "_moreaction_" + i
                                    + '" class="easyui-menubutton" data-options="menu:\'#' + view.id + '_moreaction_' + i + '_mm\'">更多</a>');
                            moreBtns.push('<div id="' + view.id + '_moreaction_' + i + '_mm" style="width:150px;">');
                            menuButtons.push(view.id + "_moreaction_" + i);
                            currentBtns = restBtns;
                        }
                        if (shownBtnCount >= maxButtons && realShowButtonCount > maxButtons) {
                            restBtns.push('<div id="' + view.id + "_action_" + act.id + '_' + i + '" ');
                        } else {
                            normalBtns.push('<a class="easyui-linkbutton" id="' + view.id + "_action_" + act.id + '_' + i + '" ');
                            listButtons.push("#" + view.id + "_action_" + act.id + "_" + i);
                        }
                        currentBtns.push('data-options="iconCls:\'' + act.icon + '\', disabled:' + (!act.enable) + ', plain:true" ');
                        if (act.click && act.click != '') {
                            currentBtns.push('onclick="javascript:' + view.id + '.' + act.click + '(' + view.id + ', null, \'' + act.id + '\', ' + i + ')"');
                        }
                        currentBtns.push('>' + act.label);
                        if (shownBtnCount >= maxButtons && realShowButtonCount > maxButtons) {
                            restBtns.push('</div>');
                        } else {
                            currentBtns.push('</a>');
                        }
                    }
                    opts = normalBtns.join('');
                    if (shownBtnCount > maxButtons) {
                        opts += moreBtns.join('') + restBtns.join('') + "</div>";
                    }
                }
                // Stash rendered action markup on the row so the grid column
                // formatter can pick it up.
                list[i]["_FW_ACTIONS"] = opts;
                list[i]["_FW_LINKBUTTONS"] = listButtons;
                list[i]["_FW_MENUBUTTONS"] = menuButtons;
            }
            view.getGrid().datagrid({
                data : list
            });
            // easyui widgets must be initialized after the markup is in the DOM.
            for ( var i = 0; i < listButtons.length; ++i) {
                $(listButtons[i]).linkbutton();
            }
            for ( var i = 0; i < menuButtons.length; ++i) {
                $("#" + menuButtons[i]).menubutton();
            }
            if (returnData.footerData) {
                view.getGrid().datagrid('reloadFooter', returnData.footerData);
            }
            // Sync paging state back into the hidden form fields and the
            // pagination widget.
            var pg = returnData.page;
            $(view.getId("totalRow")).val(pg.totalRow);
            $(view.getId("pageSize")).val(pg.pageSize);
            $(view.getId("pageNumber")).val(pg.pageNumber);
            view.getPagination().pagination({
                total : pg.totalRow,
                pageSize : pg.pageSize,
                pageNumber : pg.pageNumber
            });
            // Restore the sort indicator on the sorted column header.
            $("tr[class='datagrid-header-row'] td[field='" + $(view.getId("sortName")).val() + "'] div").addClass(
                    "datagrid-sort-" + $(view.getId("sortOrder")).val());
            view.getGrid().datagrid('doCellTip', {
                onlyShowInterrupt : true,
                position : 'bottom'
            });
        }
    });
}
|
/* Write a function findLongestWord(string) that takes an arbitrary string
   (containing only words and spaces) and returns the longest word in it.
   On ties the first longest word wins, since only a strictly longer word
   replaces the current best. */
'use strict';

const findLongestWord = function (string) {
  return string
    .split(' ')
    .reduce((longest, word) => (word.length > longest.length ? word : longest));
}

/*
 * Sample calls to verify the implementation.
 */
console.log(findLongestWord('The quick brown fox jumped over the lazy dog')); // 'jumped'
console.log(findLongestWord('Google do a roll')); // 'Google'
console.log(findLongestWord('May the force be with you')); // 'force'
|
from flask_wtf import FlaskForm
from wtforms import StringField,TextAreaField,SubmitField,SelectField
from wtforms.validators import Required
class UpdateProfile(FlaskForm):
    """Profile-edit form: a single free-text bio field plus a save button."""
    # NOTE(review): wtforms' `Required` is deprecated in favor of
    # `DataRequired` (same behavior); rename when convenient.
    bio = TextAreaField('Write a brief bio about you.',validators = [Required()])
    submit = SubmitField('Save')
class PitchForm(FlaskForm):
    """Form for submitting a new pitch: title, category, and body text."""
    title = StringField('Title', validators=[Required()])
    # choices are (value, label) pairs rendered as a select box.
    category = SelectField('Category', choices=[('Interview','Interview'),('Product','Product'),('Movie','Movie'),('Life','Life')],validators=[Required()])
    post = TextAreaField('Your Pitch', validators=[Required()])
    submit = SubmitField('Pitch')
class CommentForm(FlaskForm):
    """Form for leaving a comment on a pitch."""
    comment = TextAreaField('Leave a comment',validators=[Required()])
    submit = SubmitField('Comment')
|
import Path from '@stephenbunch/path';
export default class SchemaPath {
constructor( path, type ) {
this.name = path;
this.pathType = type;
this.accessor = new Path( path );
}
get( object ) {
return this.accessor.get( object );
}
set( object, value ) {
this.accessor.set( object, value );
}
};
|
server_ip = "127.0.0.1"
server_port = "5000"
server_url = "http://" + server_ip + ':' + server_port
print("Running tests on:", server_url)
data = {
"content": "This is a test data"
}
no_id = 'no_such_id'
truth = {
"content": "This is a test truth"
}
|
#include <stdio.h>

/*
 * Swap two integers via a temporary variable and print the result.
 * FIX: `void main` is non-standard; hosted C requires main to return int
 * (C11 5.1.2.2.1), so return 0 on success.
 */
int main(void)
{
    int x = 10, y = 15, temp;

    /* classic three-step swap */
    temp = x;
    x = y;
    y = temp;

    printf("x = %d and y = %d", x, y);
    return 0;
}
|
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""dateutil and datetime with portable timezone and ISO 8601 durations.
This module supports round-trip conversions between strings, datetime objects
and timestamps:
=> ParseDateTime => => GetTimeStampFromDateTime =>
string datetime timestamp
<= FormatDateTime <= <= GetDateTimeFromTimeStamp <=
GetTimeZone(str) returns the tzinfo object for a timezone name. It handles
abbreviations, IANA timezone names, and on Windows translates timezone names to
the closest Windows TimeZone registry equivalent.
LocalizeDateTime(datetime, tzinfo) returns a datetime object relative to the
timezone tzinfo.
ISO 8601 duration/period conversions are also supported:
=> ParseDuration => => GetDateTimePlusDuration =>
string Duration datetime
<= FormatDuration <=
timedelta => GetDurationFromTimeDelta => Duration
This module is biased to the local timezone by default. To operate on timezone
naiive datetimes specify tzinfo=None in all calls that have a timezone kwarg.
The datetime and/or dateutil modules should have covered all of this.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import datetime
import re
from dateutil import parser
from dateutil import tz
from dateutil.tz import _common as tz_common
import enum
from googlecloudsdk.core import exceptions
from googlecloudsdk.core.util import encoding
from googlecloudsdk.core.util import iso_duration
from googlecloudsdk.core.util import times_data
import six
try:
from dateutil import tzwin # pylint: disable=g-import-not-at-top, Windows
except ImportError:
tzwin = None
class Error(exceptions.Error):
  """Base class for all date/time and duration errors in this module."""
class DateTimeSyntaxError(Error):
  """Date/Time string syntax error (malformed input string)."""
class DateTimeValueError(Error):
  """Date/Time part overflow error (a component is out of range)."""
class DurationSyntaxError(Error):
  """Duration string syntax error (malformed ISO 8601 duration)."""
class DurationValueError(Error):
  """Duration part overflow error (a numeric component is out of range)."""
# Force dateutil's tz _common module onto its Python 3 code path.
tz_common.PY3 = True  # MONKEYPATCH!!! Fixes a Python 2 standard module bug.
LOCAL = tz.tzlocal()  # The local timezone.
UTC = tz.tzutc()  # The UTC timezone.
# datetime supports at most microsecond precision (6 fractional digits).
_MICROSECOND_PRECISION = 6
def _StrFtime(dt, fmt):
  """Convert strftime exceptions to Datetime Errors.

  Args:
    dt: The datetime object to format.
    fmt: The strftime(3) format string.

  Raises:
    DateTimeValueError: If a format part is invalid or overflows.

  Returns:
    The formatted date/time string.
  """
  try:
    return dt.strftime(fmt)
  except TypeError as e:
    if '%Z' not in fmt:
      raise DateTimeValueError(six.text_type(e))
    # Most likely a non-ascii tzname() in python2. Fall back to +-HH:MM.
    return FormatDateTime(dt, fmt.replace('%Z', '%Ez'))
  except (AttributeError, OverflowError, ValueError) as e:
    raise DateTimeValueError(six.text_type(e))
def _StrPtime(string, fmt):
"""Convert strptime exceptions to Datetime Errors."""
try:
return datetime.datetime.strptime(string, fmt)
except (AttributeError, OverflowError, TypeError) as e:
raise DateTimeValueError(six.text_type(e))
except ValueError as e:
raise DateTimeSyntaxError(six.text_type(e))
def FormatDuration(duration, parts=3, precision=3):
  """Formats duration as an ISO 8601 duration/period string.

  The format is "[-]P[nY][nM][nD][T[nH][nM][n[.m]S]]". At least one part is
  always displayed and the zero duration is "P0". Negative durations carry a
  leading "-". The "T" disambiguates months ("P2M", left of "T") from minutes
  ("PT5M", right of "T").

  Args:
    duration: An iso_duration.Duration object.
    parts: Show at most this many duration parts, starting with the largest
      non-zero part.
    precision: Digits after the decimal point in the last part; trailing "0"
      and "." are always stripped.

  Raises:
    DurationValueError: A Duration numeric constant exceeded its range.

  Returns:
    The ISO 8601 string representation of duration.
  """
  return duration.Format(parts=parts, precision=precision)
def FormatDurationForJson(duration):
  """Returns duration as a protobuf-JSON duration string ("<seconds>s").

  See the JSON section of
  <https://github.com/google/protobuf/blob/master/src/google/protobuf/duration.proto>.

  For example:
    Duration(seconds=10)                 => '10s'
    Duration(hours=1)                    => '3600s'
    Duration(seconds=1, microseconds=5)  => '1.000005s'

  Args:
    duration: An iso_duration.Duration object.

  Raises:
    DurationValueError: A Duration numeric constant exceeded its range.

  Returns:
    A string representation of the duration, ending in 's'.
  """
  # Caution: the default precision for formatting floats is also 6, so when
  # introducing adjustable precision, make sure to account for that.
  text = '{}'.format(round(duration.total_seconds, _MICROSECOND_PRECISION))
  suffix = '.0'
  if text.endswith(suffix):
    # Whole number of seconds: drop the pointless fraction.
    text = text[:-len(suffix)]
  return text + 's'
def ParseDuration(string, calendar=False, default_suffix=None):
  """Parses an ISO 8601 duration/period string into a Duration object.

  Durations using only hours, minutes, seconds and microseconds are exact.
  calendar=True allows units larger than hours; those durations are inexact
  across daylight savings time and leap year boundaries but are "calendar"
  correct. For example:

    2015-02-14 + P1Y   => 2016-02-14
    2015-02-14 + P365D => 2016-02-14
    2016-02-14 + P1Y   => 2017-02-14
    2016-02-14 + P366D => 2017-02-14
    2016-03-13T01:00:00 + P1D   => 2016-03-14T01:00:00
    2016-03-13T01:00:00 + PT23H => 2016-03-14T01:00:00
    2016-03-13T01:00:00 + PT24H => 2016-03-14T03:00:00

  Args:
    string: The ISO 8601 duration/period string to parse.
    calendar: Use duration units larger than hours if True.
    default_suffix: Use this suffix if string is an unqualified int.

  Raises:
    DurationSyntaxError: Invalid duration syntax.
    DurationValueError: A Duration numeric constant exceeded its range.

  Returns:
    An iso_duration.Duration object for the given string.
  """
  if default_suffix:
    try:
      # A bare integer gets the default unit suffix appended.
      string = '{}{}'.format(int(string), default_suffix)
    except ValueError:
      pass
  try:
    return iso_duration.Duration(calendar=calendar).Parse(string)
  except ValueError as e:
    raise DurationSyntaxError(six.text_type(e))
  except (AttributeError, OverflowError) as e:
    raise DurationValueError(six.text_type(e))
def GetDurationFromTimeDelta(delta, calendar=False):
  """Converts a datetime.timedelta into an iso_duration.Duration.

  Args:
    delta: The datetime.timedelta object to convert.
    calendar: If True, the result may use duration units larger than hours.

  Returns:
    An iso_duration.Duration equivalent to delta.
  """
  return iso_duration.Duration(delta=delta, calendar=calendar)
def GetDateTimePlusDuration(dt, duration):
  """Returns the datetime obtained by advancing dt by duration.

  Args:
    dt: The datetime object to add the duration to.
    duration: The iso_duration.Duration object to add.

  Returns:
    A new datetime object representing dt + duration.
  """
  shifted = duration.GetRelativeDateTime(dt)
  return shifted
def GetTimeZone(name):
  """Returns a datetime.tzinfo object for a timezone name.

  Args:
    name: A timezone name string; 'UTC'/'Z' and 'LOCAL'/'L' are handled
      specially, common abbreviations are mapped to IANA names, and on Windows
      an IANA-to-Windows fallback is attempted.

  Returns:
    A datetime.tzinfo object for name, or None if the name is unknown.
  """
  if name in ('UTC', 'Z'):
    return UTC
  if name in ('LOCAL', 'L'):
    return LOCAL
  # Abbreviations like 'EST' are ambiguous; map them to fixed IANA names.
  name = times_data.ABBREVIATION_TO_IANA.get(name, name)
  tzinfo = tz.gettz(name)
  if tzinfo or not tzwin:
    return tzinfo
  # dateutil failed; try the Windows registry timezone database.
  try:
    return tzwin.tzwin(times_data.IANA_TO_WINDOWS.get(name, name))
  except WindowsError:  # pylint: disable=undefined-variable
    return None
def FormatDateTime(dt, fmt=None, tzinfo=None):
  """Returns a string of a datetime object formatted by an extended strftime().

  fmt handles these modifier extensions to the standard formatting chars:

    %Nf  Limit the fractional seconds to N digits. The default is N=6.
    %Ez  Format +/-HHMM offsets as ISO RFC 3339 Z for +0000 otherwise +/-HH:MM.
    %Oz  Format +/-HHMM offsets as ISO RFC 3339 +/-HH:MM.

  NOTE: The standard Python 2 strftime() borks non-ascii time parts. It does
  so by encoding non-ascii names to bytes, presumably under the assumption that
  the return value will be immediately output. This code works around that by
  decoding strftime() values to unicode if necessary and then returning either
  an ASCII or UNICODE string.

  Args:
    dt: The datetime object to be formatted.
    fmt: The strftime(3) format string, None for the RFC 3339 format in the dt
      timezone ('%Y-%m-%dT%H:%M:%S.%3f%Ez').
    tzinfo: Format dt relative to this timezone.

  Raises:
    DateTimeValueError: A DateTime numeric constant exceeded its range.

  Returns:
    A string of a datetime object formatted by an extended strftime().
  """
  if tzinfo:
    dt = LocalizeDateTime(dt, tzinfo)
  if not fmt:
    fmt = '%Y-%m-%dT%H:%M:%S.%3f%Ez'
  # One extension directive: optional width digit, optional E/O modifier,
  # then one of f/s/z (e.g. '%3f', '%Ez', '%s').
  extension = re.compile('%[1-9]?[EO]?[fsz]')
  m = extension.search(fmt)
  if not m:
    # No extensions present: plain strftime handles the whole format.
    return encoding.Decode(_StrFtime(dt, fmt))
  # Split the format into standard and extension parts.
  parts = []
  start = 0
  while m:
    # m was searched in fmt[start:], so its offsets are relative to start.
    match = start + m.start()
    if start < match:
      # Format the preceding standard part.
      parts.append(encoding.Decode(_StrFtime(dt, fmt[start:match])))
    # The extensions only have one modifier char.
    match += 1
    if fmt[match].isdigit():
      n = int(fmt[match])
      match += 1
    else:
      n = None
    if fmt[match] in ('E', 'O'):
      alternate = fmt[match]
      match += 1
    else:
      alternate = None
    spec = fmt[match]
    std_fmt = '%' + spec
    if spec == 'f':
      # Round the fractional part to n digits.
      val = _StrFtime(dt, std_fmt)
      if n and n < len(val):
        # Explicitly avoiding implementation dependent floating point rounding
        # diffs: round the digit string by hand.
        v = int(val[:n])  # The rounded value.
        f = int(val[n])  # The first digit after the rounded value.
        if f >= 5:
          # Round up.
          v += 1
        zero_fill_format = '{{0:0{n}d}}'.format(n=n)
        val = zero_fill_format.format(v)
        if len(val) > n:
          # All 9's rounded up by 1 overflowed width. Keep the unrounded value.
          val = zero_fill_format.format(v - 1)
    elif spec == 's':
      # datetime.strftime('%s') botches tz aware dt!
      val = GetTimeStampFromDateTime(dt)
    elif spec == 'z':
      # Convert the time zone offset to RFC 3339 format.
      val = _StrFtime(dt, std_fmt)
      if alternate:
        if alternate == 'E' and val == '+0000':
          val = 'Z'
        elif len(val) == 5:
          # '+HHMM' -> '+HH:MM'.
          val = val[:3] + ':' + val[3:]
    if val:
      parts.append(encoding.Decode(val))
    # Continue scanning after the directive just handled.
    start += m.end()
    m = extension.search(fmt[start:])
  # Format the trailing part if any.
  if start < len(fmt):
    parts.append(encoding.Decode(_StrFtime(dt, fmt[start:])))
  # Combine the parts.
  return ''.join(parts)
class _TzInfoOrOffsetGetter(object):
"""A helper class for dateutil.parser.parse().
Attributes:
_timezone_was_specified: True if the parsed date/time string contained
an explicit timezone name or offset.
"""
def __init__(self):
self._timezone_was_specified = False
def Get(self, name, offset):
"""Returns the tzinfo for name or offset.
Used by dateutil.parser.parse() to convert timezone names and offsets.
Args:
name: A timezone name or None to use offset. If offset is also None then
the local tzinfo is returned.
offset: A signed UTC timezone offset in seconds.
Returns:
The tzinfo for name or offset or the local tzinfo if both are None.
"""
if name or offset:
self._timezone_was_specified = True
if not name and offset is not None:
return offset
return GetTimeZone(name)
@property
def timezone_was_specified(self):
"""True if the parsed date/time string contained an explicit timezone."""
return self._timezone_was_specified
def _SplitTzFromDate(string):
"""Returns (prefix,tzinfo) if string has a trailing tz, else (None,None)."""
try:
match = re.match(r'(.*[\d\s])([^\d\s]+)$', string)
except TypeError:
return None, None
if match:
tzinfo = GetTimeZone(match.group(2))
if tzinfo:
return match.group(1), tzinfo
return None, None
def ParseDateTime(string, fmt=None, tzinfo=LOCAL):
  """Parses a date/time string and returns a datetime.datetime object.

  Args:
    string: The date/time string to parse. This can be a parser.parse()
      date/time or an ISO 8601 duration after Now(tzinfo) or before if
      prefixed by '-'.
    fmt: The input must satisfy this strptime(3) format string.
    tzinfo: A default timezone tzinfo object to use if string has no timezone.

  Raises:
    DateTimeSyntaxError: Invalid date/time/duration syntax.
    DateTimeValueError: A date/time numeric constant exceeds its range.

  Returns:
    A datetime.datetime object for the given date/time string.
  """
  # Check explicit format first.
  if fmt:
    dt = _StrPtime(string, fmt)
    if tzinfo and not dt.tzinfo:
      # strptime produced a naive datetime: attach the default timezone.
      dt = dt.replace(tzinfo=tzinfo)
    return dt
  # Use tzgetter to determine if string contains an explicit timezone name or
  # offset.
  defaults = GetDateTimeDefaults(tzinfo=tzinfo)
  tzgetter = _TzInfoOrOffsetGetter()
  exc = None
  try:
    dt = parser.parse(string, tzinfos=tzgetter.Get, default=defaults)
    if tzinfo and not tzgetter.timezone_was_specified:
      # The string had no timezone name or offset => localize dt to tzinfo.
      # Reparse without tzinfos in case the first parse consumed a token as a
      # timezone, then attach the default timezone.
      dt = parser.parse(string, tzinfos=None, default=defaults)
      dt = dt.replace(tzinfo=tzinfo)
    return dt
  except OverflowError as e:
    # Don't raise yet: the string may still be a valid ISO duration (below).
    exc = exceptions.ExceptionContext(DateTimeValueError(six.text_type(e)))
  except (AttributeError, ValueError, TypeError) as e:
    exc = exceptions.ExceptionContext(DateTimeSyntaxError(six.text_type(e)))
  if not tzgetter.timezone_was_specified:
    # Good ole parser.parse() has a tzinfos kwarg that it sometimes ignores.
    # Compensate here when the string ends with a tz.
    prefix, explicit_tzinfo = _SplitTzFromDate(string)
    if explicit_tzinfo:
      try:
        dt = parser.parse(prefix, default=defaults)
      except OverflowError as e:
        exc = exceptions.ExceptionContext(
            DateTimeValueError(six.text_type(e)))
      except (AttributeError, ValueError, TypeError) as e:
        exc = exceptions.ExceptionContext(
            DateTimeSyntaxError(six.text_type(e)))
      else:
        return dt.replace(tzinfo=explicit_tzinfo)
  try:
    # Check if it's an iso_duration string.
    return ParseDuration(string).GetRelativeDateTime(Now(tzinfo=tzinfo))
  except Error:
    # Not a duration - reraise the datetime parse error.
    exc.Reraise()
def GetDateTimeFromTimeStamp(timestamp, tzinfo=LOCAL):
  """Returns the datetime for a UNIX timestamp.

  Args:
    timestamp: Seconds since the epoch (1970-01-01T00:00:00.000000Z) as an
      int or float.
    tzinfo: A tzinfo object for the timestamp timezone, None for naive.

  Raises:
    DateTimeValueError: The timestamp is outside the supported range.

  Returns:
    The datetime object for the given UNIX timestamp.
  """
  try:
    result = datetime.datetime.fromtimestamp(timestamp, tzinfo)
  except ValueError as e:
    raise DateTimeValueError(six.text_type(e))
  return result
def GetTimeStampFromDateTime(dt, tzinfo=LOCAL):
  """Returns the float UNIX timestamp (with microseconds) for dt.

  Args:
    dt: The datetime object to convert.
    tzinfo: The timezone assumed for dt when dt itself is naive.

  Returns:
    Seconds since the UNIX epoch as a float, including microseconds.
  """
  if tzinfo and not dt.tzinfo:
    dt = dt.replace(tzinfo=tzinfo)
  epoch = datetime.datetime.fromtimestamp(0, UTC)
  return (dt - epoch).total_seconds()
def LocalizeDateTime(dt, tzinfo=LOCAL):
  """Returns dt localized to the timezone tzinfo.

  Args:
    dt: The datetime object to localize. It can be timezone naive or aware.
    tzinfo: The target timezone. If None the result is naive, otherwise it
      is aware.

  Returns:
    The equivalent datetime object in the timezone tzinfo.
  """
  timestamp = GetTimeStampFromDateTime(dt, tzinfo=tzinfo)
  return GetDateTimeFromTimeStamp(timestamp, tzinfo=tzinfo)
def Now(tzinfo=LOCAL):
  """Returns the current time as a datetime in the given timezone.

  Args:
    tzinfo: The timezone of the result. If None the result is naive,
      otherwise it is aware.

  Returns:
    The current datetime localized to tzinfo.
  """
  return datetime.datetime.now(tzinfo)
def GetDateTimeDefaults(tzinfo=LOCAL):
  """Returns the default datetime used to fill in partial date/time strings.

  The year, month and day default to today (right now) in tzinfo; the hour,
  minute, second and fractional second values default to 0.

  Args:
    tzinfo: The timezone used to determine "today". If None the result is
      naive, otherwise it is aware.

  Returns:
    A datetime object supplying defaults for parsing partial datetimes.
  """
  today = Now(tzinfo=tzinfo).date()
  return datetime.datetime.combine(today, datetime.time.min)
def TzOffset(offset, name=None):
  """Returns a fixed-offset tzinfo for offset minutes east of UTC.

  Args:
    offset: The minutes east of UTC. Minutes west are negative.
    name: The optional timezone name. NOTE: no dst name.

  Returns:
    A tzinfo at the given fixed offset.
  """
  seconds_east = offset * 60  # tz.tzoffset expects seconds east of UTC.
  return tz.tzoffset(name, seconds_east)
class Weekday(enum.Enum):
  """Represents a day of the week, MONDAY=0 through SUNDAY=6."""

  MONDAY = 0
  TUESDAY = 1
  WEDNESDAY = 2
  THURSDAY = 3
  FRIDAY = 4
  SATURDAY = 5
  SUNDAY = 6

  @classmethod
  def Get(cls, day):
    """Returns the Weekday member named day (case-insensitive).

    Args:
      day: The weekday name, in any case.

    Raises:
      KeyError: day does not name a Weekday member.

    Returns:
      The matching Weekday enum member.
    """
    key = day.upper()
    member = getattr(cls, key, None)
    if not member:
      raise KeyError('[{}] is not a valid Weekday'.format(key))
    return member
def GetWeekdayInTimezone(dt, weekday, tzinfo=LOCAL):
  """Returns the Weekday that weekday maps to when dt is viewed in tzinfo.

  Args:
    dt: The datetime object that represents the time on weekday.
    weekday: The day of the week specified as a Weekday enum member.
    tzinfo: The timezone in which to express the day of the week.

  Returns:
    The Weekday corresponding to the (dt, weekday) pair localized to tzinfo.
  """
  localized_dt = LocalizeDateTime(dt, tzinfo)
  # Crossing midnight during localization shifts the day of the week.
  day_shift = dt.weekday() - localized_dt.weekday()
  return Weekday((weekday.value - day_shift) % 7)
|
import Announcement from './Announcment';
export default Announcement;
|
#! /usr/bin/env python
from seleniumrequests import Chrome
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.chrome.options import Options
import time
import argparse
# Upper bound on how many posts a single run will attempt to delete.
MAX_POSTS = 5000
def run_delete():
    """Collects credentials from the command line and starts post deletion."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-E", "--email", default=None,
                            help="Your email address associated with the account")
    arg_parser.add_argument("-P", "--password", default=None,
                            help="Your Facebook password")
    arg_parser.add_argument("-U", "--profile-url", default=None,
                            help="""
The link to your Facebook profile, e.g. https://www.facebook.com/your.name
""")
    options = arg_parser.parse_args()
    delete_posts(user_email_address=options.email,
                 user_password=options.password,
                 user_profile_url=options.profile_url)
def delete_posts(user_email_address=None,
                 user_password=None,
                 user_profile_url=None):
    """Logs into Facebook and deletes (or hides) up to MAX_POSTS timeline posts.

    user_email_address: Your Email
    user_password: Your password
    user_profile_url: Your profile URL
    """
    assert all((user_email_address,
                user_password,
                user_profile_url)), "Insufficient arguments provided"
    # The Chrome driver is required because Gecko was having issues
    chrome_options = Options()
    # Disable browser notification prompts that would otherwise steal focus.
    prefs = {"profile.default_content_setting_values.notifications": 2}
    chrome_options.add_experimental_option("prefs", prefs)
    chrome_options.add_argument("start-maximized")
    driver = Chrome(chrome_options=chrome_options)
    driver.implicitly_wait(10)
    driver.get("https://facebook.com")
    email = "email"
    password = "pass"
    login = "loginbutton"
    emailelement = driver.find_element_by_name(email)
    passwordelement = driver.find_element_by_name(password)
    emailelement.send_keys(user_email_address)
    passwordelement.send_keys(user_password)
    loginelement = driver.find_element_by_id(login)
    loginelement.click()
    if "Two-factor authentication" in driver.page_source:
        # Allow time to enter 2FA code
        print("Pausing to enter 2FA code")
        time.sleep(20)
        print("Continuing execution")
    driver.get(user_profile_url)
    for _ in range(MAX_POSTS):
        post_button_sel = "_4xev"
        timeline_element = driver.find_element_by_class_name(post_button_sel)
        actions = ActionChains(driver)
        actions.move_to_element(timeline_element).click().perform()
        menu = driver.find_element_by_css_selector("#globalContainer > div.uiContextualLayerPositioner.uiLayer > div")
        actions.move_to_element(menu).perform()
        try:
            delete_button = menu.find_element_by_xpath("//a[@data-feed-option-name=\"FeedDeleteOption\"]")
        # Catch Exception rather than a bare except so Ctrl-C (KeyboardInterrupt)
        # and SystemExit still abort the run; the expected failure here is the
        # element lookup raising when the post cannot be deleted.
        except Exception:
            # Fall back to hiding posts that cannot be deleted outright.
            delete_button = menu.find_element_by_xpath("//a[@data-feed-option-name=\"HIDE_FROM_TIMELINE\"]")
        actions.move_to_element(delete_button).click().perform()
        confirmation_button = driver.find_element_by_class_name("layerConfirm")
        # Facebook would not let me get focus on this button without some custom JS
        driver.execute_script("arguments[0].click();", confirmation_button)
        # Required to sleep the thread for a bit after using JS to click this button
        time.sleep(5)
        driver.refresh()
|
from area53 import route53
from boto.route53.exception import DNSServerError
from kubernetes import client, config
from datetime import datetime
import socket
import time
import os
# Configs can be set in Configuration class directly or using helper utility
config.load_kube_config()
v1 = client.CoreV1Api()

# Find the first pod whose name contains "notebook".
ret = v1.list_pod_for_all_namespaces(watch=False)
key = None
for i in ret.items:
    if "notebook" in i.metadata.name:
        key = i.metadata.name
        print(key)
        break

# Previously `key` was assigned on every iteration before the check, so when
# no notebook pod existed the kubectl commands ran against the LAST pod
# listed (or crashed with NameError on an empty cluster). Guard instead.
if key is None:
    raise SystemExit("No pod with 'notebook' in its name was found")

os.system("kubectl cp ./debezium/setup-mlflow-connector.sh "+key+":/home/admin")
os.system("kubectl exec -it "+key+" -- /bin/bash setup-mlflow-connector.sh")
|
from flask_restful import Resource, reqparse
from models.user import UserModel
class UserRegister(Resource):
    """Flask-RESTful resource that creates new user accounts."""

    # Both fields are mandatory in the request payload.
    parser = reqparse.RequestParser()
    parser.add_argument('username', type=str, required=True,
                        help="This field cannot be blank.")
    parser.add_argument('password', type=str, required=True,
                        help="This field cannot be blank.")

    def post(self):
        """Registers a user; 400 if the username is taken, 201 on success."""
        payload = UserRegister.parser.parse_args()

        if UserModel.find_by_username(payload['username']):
            return {"message": "A user with that username already exists"}, 400

        new_user = UserModel(payload['username'], payload['password'])
        new_user.save_to_db()

        return {"message": "User created successfully."}, 201
|
function Todo() {
/*
// calling something
All.call(this, param);
*/
this.todos = [];
this.displayTodos = function () {
// ul to display todo
var displayedTodoField = document.getElementById("displayedTodoField");
// call the functionto reove all li items when displaying todos
removeAll(displayedTodoField);
// if there is todos show theme else display message
if (this.todos.length > 0) {
// var todo$ for todos text's
var todo$,
// var appendingList fo li's tag's
appendingList;
// for displaying
for (var i = 0; i < this.todos.length; i++) {
// var todo$ for todos text's
todo$ = this.todos[i].todoText;
// if completed write (x) before and (_) if not
if (this.todos[i].completed) {
// var appendingList fo li's tag's
appendingList = this.createListItem();
// inserting text's in li's
appendingList.textContent = "(X) " + todo$;
//inserting delete button and note that id's is index +1 of the todo's
appendingList.appendChild(this.createDeleteButton(i));
// inserting li's in the ul
displayedTodoField.appendChild(appendingList);
// logging for test
console.log("(X) " + todo$);
} else {
// var appendingList fo li's tag's
appendingList = this.createListItem();
// inserting text's in li's
appendingList.textContent = "(_) " + todo$;
//inserting delete button and note that id's is index +1 of the todo's
appendingList.appendChild(this.createDeleteButton(i));
// inserting li's in the ul
displayedTodoField.appendChild(appendingList);
// logging for test
console.log("(_) " + todo$);
}
}
} else {
// show the message
console.log("There is no todos");
}
};
// creating list's and making the li tag editable
this.createListItem = function () {
var listItemElement = document.createElement("li");
// making the li tag editable
listItemElement.setAttribute("contenteditable", "true");
// giveing the li a class Nme for event handling
listItemElement.className = "listeItemTodo";
return listItemElement;
};
// adding todo methode to todos array
this.addTodo = function (todoText) {
// pushing the {}
this.todos.push({
//todo text's
todoText: todoText,
// if completede true else false default is false
completed: false
});
// displaying wath we push
this.displayTodos();
};
// deleting a spesific todo
this.deleteTodo = function (pos) {
// eleminate the 0 whene positioning and delete with splice()
this.todos.splice(pos - 1, 1);
// showing wath we delete
this.displayTodos();
};
// creating the delete button
this.createDeleteButton = function (idNumber) {
// creating the button
var deleteButtonElement = document.createElement("button");
// insrerting the text in the button
deleteButtonElement.textContent = "delete";
// inderting the id Note that the id is the index + 1
deleteButtonElement.id = idNumber + 1;
// inserting the class "deleteButton"
deleteButtonElement.className = "deleteButton";
// ineditable for not changing the text
deleteButtonElement.setAttribute("contenteditable", "false");
// return the complete delete button
return deleteButtonElement;
};
// changing the text of a spesefic todo
this.changeTodo = function (pos, todoText) {
// position -1 cause we eliminate the zero
pos = pos - 1;
// replacing the spesefic text
this.todos[pos].todoText = todoText;
// return the todo
this.displayTodos();
};
// changing completed todo to inverse
this.toggleCompleted = function (pos) {
// position -1 cause we eliminate the zero
pos = pos - 1;
// inversingg if true -> false else true
this.todos[pos].completed = !this.todos[pos].completed;
// displaying all todos
this.displayTodos();
};
// inversing all completed todo to a one
// if one all is true all is false if all is false or one false all is true
this.toggleAll = function () {
// counting the completed === true
var count = 0;
//looping into the array
for (var i = 0; i < this.todos.length; i++) {
// counting the completed === true and incrementing count by 1
if (this.todos[i].completed === true) {
count++;
}
}
// if all is true change all to false else chnge all to true
if (count === this.todos.length) {
// change all to false
for (var j = 0; j < this.todos.length; j++) {
this.todos[j].completed = false;
}
} else {
// if one is false and more change all to true
for (var k = 0; k < this.todos.length; k++) {
this.todos[k].completed = true;
}
}
// display all toggled
this.displayTodos();
// reseting count to 0
count = 0;
};
Todo.prototype = Object.create(All.prototype, {
constructor: {
configurable: true,
enumerable: true,
value: Todo,
writable: true
}
});
};
/*
//replacing something
Todo.prototype.toString = function () {
var text = All.prototype.toString.call(this);
return text.replace("xx", "xx");
};
*/
|
/****************************************************************************
* boards/arm/tiva/lm3s8962-ek/src/lm_oled.c
*
* Copyright (C) 2010, 2015 Gregory Nutt. All rights reserved.
* Author: Gregory Nutt <gnutt@nuttx.org>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name NuttX nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <stdio.h>
#include <debug.h>
#include <errno.h>
#include <nuttx/board.h>
#include <nuttx/spi/spi.h>
#include <nuttx/lcd/lcd.h>
#include <nuttx/lcd/p14201.h>
#include "tiva_gpio.h"
#include "tiva_ssi.h"
#include "lm3s8962-ek.h"
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
/* Define the CONFIG_LCD_RITDEBUG to enable detailed debug output (stuff you
* would never want to see unless you are debugging this file).
*
* Verbose debug must also be enabled
*/
/* Detailed RIT debug output requires CONFIG_DEBUG_FEATURES and
 * CONFIG_DEBUG_INFO; strip the dependent options when they are absent.
 */

#ifndef CONFIG_DEBUG_FEATURES
# undef CONFIG_DEBUG_INFO
# undef CONFIG_DEBUG_GRAPHICS
#endif

#ifndef CONFIG_DEBUG_INFO
# undef CONFIG_LCD_RITDEBUG
#endif

#ifdef CONFIG_LCD_RITDEBUG
# define riterr(format, ...) _info(format, ##__VA_ARGS__)
# define oleddc_dumpgpio(m) tiva_dumpgpio(OLEDDC_GPIO, m)
# define oledcs_dumpgpio(m) tiva_dumpgpio(OLEDCS_GPIO, m)
#else
/* Debugging disabled: the log/dump macros compile away entirely. */
# define riterr(x...)
# define oleddc_dumpgpio(m)
# define oledcs_dumpgpio(m)
#endif
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: board_graphics_setup
*
* Description:
* Called NX initialization logic to configure the OLED.
*
****************************************************************************/
FAR struct lcd_dev_s *board_graphics_setup(unsigned int devno)
{
  FAR struct spi_dev_s *spi;
  FAR struct lcd_dev_s *dev;

  /* Configure the OLED GPIOs */

  oledcs_dumpgpio("board_graphics_setup: After OLEDCS setup");
  oleddc_dumpgpio("board_graphics_setup: On entry");

  tiva_configgpio(OLEDDC_GPIO); /* PC7: OLED display data/control select (D/Cn) */
  tiva_configgpio(OLEDEN_GPIO); /* PC6: Enable +15V needed by OLED (EN+15V) */

  oleddc_dumpgpio("board_graphics_setup: After OLEDDC/EN setup");

  /* Get the SSI port (configure as a Freescale SPI port) */

  spi = tiva_ssibus_initialize(0);
  if (!spi)
    {
      gerr("ERROR: Failed to initialize SSI port 0\n");
    }
  else
    {
      /* Bind the SSI port to the OLED */

      dev = rit_initialize(spi, devno);
      if (!dev)
        {
          /* Fix: the original format string had a trailing ": %d" with no
           * matching argument, so gerr() read garbage from the stack.
           */

          gerr("ERROR: Failed to bind SSI port 0 to OLED %d\n", devno);
        }
      else
        {
          ginfo("Bound SSI port 0 to OLED %d\n", devno);

          /* And turn the OLED on (CONFIG_LCD_MAXPOWER should be 1) */

          dev->setpower(dev, CONFIG_LCD_MAXPOWER);
          return dev;
        }
    }

  return NULL;
}
/****************************************************************************
* Name: tiva_ssicmddata
*
* Description:
* Set or clear the SD1329 D/Cn bit to select data (true) or command
* (false). This function must be provided by platform-specific logic.
* This is an implementation of the cmddata method of the SPI
* interface defined by struct spi_ops_s (see include/nuttx/spi/spi.h).
*
* Input Parameters:
*
* spi - SPI device that controls the bus the device that requires the CMD/
* DATA selection.
* devid - If there are multiple devices on the bus, this selects which one
* to select cmd or data. NOTE: This design restricts, for example,
* one one SPI display per SPI bus.
* cmd - true: select command; false: select data
*
* Returned Value:
* None
*
****************************************************************************/
int tiva_ssicmddata(FAR struct spi_dev_s *dev, uint32_t devid, bool cmd)
{
  if (devid != SPIDEV_DISPLAY(0))
    {
      return -ENODEV;
    }

  /* D/Cn discrete: drive high to select data, low to select command */

  tiva_gpiowrite(OLEDDC_GPIO, !cmd);
  return OK;
}
|
#! /usr/bin/jython
# -*- coding: utf-8 -*-
#
# jython_common/jython_xml_manipulate.py
#
# May/31/2012
import xml.dom.minidom
import datetime
#
import java
import sys
# Python 2/Jython hack: reload() restores sys.setdefaultencoding (which
# site.py deletes) so the default str<->unicode codec can be forced to utf-8.
reload(sys)
sys.setdefaultencoding('utf-8')
from java.lang import *
# --------------------------------------------------------------------
import xml.dom.minidom
#import datetime
sys.path.append ('/var/www/data_base/common/python_common')
#
from text_manipulate import dict_append_proc
from to_utf8 import to_utf8_proc
# --------------------------------------------------------------------
def jython_xml_to_dict_proc(xml_str_in):
    """Parses an XML string of records into a dict via dict_append_proc.

    Every element containing a <name> child is treated as a record: the
    parent's tag name is the record id, and the <name>, <population> and
    <date_mod> child values are appended to the result dict.

    xml_str_in: the XML document as a string.
    Returns: the dict built up by dict_append_proc, keyed by record id.
    """
    dict_aa = {}
    dom_in = xml.dom.minidom.parseString(xml_str_in)
    cities = dom_in.getElementsByTagName("name")
    # Fixes: removed the unused local `llx` and renamed `id`, which shadowed
    # the builtin of the same name.
    for city in cities:
        parent = city.parentNode
        node_id = parent.tagName
        name = parent.getElementsByTagName("name")[0].firstChild.nodeValue
        name = to_utf8_proc(name)
        population = parent.getElementsByTagName("population")[0].firstChild.nodeValue
        date_mod = parent.getElementsByTagName("date_mod")[0].firstChild.nodeValue
        dict_aa = dict_append_proc(dict_aa, node_id, name, population, date_mod)
    return dict_aa
# --------------------------------------------------------------------
|
#!/usr/bin/env python2.7
"""
Copyright 2014 Justin Gallardo
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
----
This plugin will hit the monitoring API and check for the latest state of an
alarm. If the state is OK, it will emit a metric alarm_state_ok with the
value 1. If it is not OK, it will emit a metric alarm_state_ok with the value 0.
"""
import argparse
import urllib2
import json
import sys
def main():
    """Authenticates with Rackspace Identity and reports an alarm's state.

    Emits Cloud Monitoring plugin output on stdout: a 'status' line plus a
    'metric alarm_state_ok int 0|1' line. Exits 1 on any API failure.
    NOTE: relies on the module-level `args` parsed in the __main__ block.
    """
    las_uri = ('https://monitoring.api.rackspacecloud.com/v1.0/views/'
               'latest_alarm_states?entityId={0}').format(args.entity_id)
    # Standard Rackspace Identity apiKeyCredentials request body.
    auth_payload = {
        'auth':{
            'RAX-KSKEY:apiKeyCredentials':{
                'username': args.user,
                'apiKey': args.api_key
            }
        }
    }
    try:
        auth_req = urllib2.Request(args.auth_uri)
        auth_req.add_header('Content-type', 'application/json')
        auth_resp = json.loads(urllib2.urlopen(auth_req, json.dumps(auth_payload)).read())
    except urllib2.HTTPError:
        print 'status err Unable to authenticate user {0}'.format(args.user)
        sys.exit(1)
    else:
        # The token and tenant id are required headers for the monitoring API.
        auth_token = auth_resp['access']['token']['id']
        tenant_id = auth_resp['access']['token']['tenant']['id']
        try:
            view_req = urllib2.Request(las_uri)
            view_req.add_header('X-Auth-Token', auth_token)
            view_req.add_header('X-Tenant-Id', tenant_id)
            view_resp = json.loads(urllib2.urlopen(view_req).read())
        except urllib2.HTTPError:
            print 'status err Unable to get latest alarm states for entity {0}'.format(args.entity_id)
            sys.exit(1)
        else:
            # Find the requested alarm among the entity's latest alarm states.
            alarm_state = None
            for las in view_resp['values'][0]['latest_alarm_states']:
                if las['alarm_id'] == args.alarm_id:
                    alarm_state = las['state']
                    break
            if not alarm_state:
                print 'status err No latest alarm state for alarm {0}'.format(args.alarm_id)
                sys.exit(1)
            print 'status Successfully grabbed latest alarm state for {0}:{1}'.format(args.entity_id, args.alarm_id)
            # Metric: 1 when the alarm is OK, 0 for any other state.
            if alarm_state == 'OK':
                print 'metric alarm_state_ok int 1'
            else:
                print 'metric alarm_state_ok int 0'
if __name__ == '__main__':
    # Parse CLI flags into the module-level `args` consumed by main().
    parser = argparse.ArgumentParser(description='Latest alarm state plugin')
    parser.add_argument('--entity-id', dest='entity_id', action='store',
                        required=True, help='The entity id')
    parser.add_argument('--alarm-id', dest='alarm_id', action='store',
                        required=True, help='The alarm id')
    parser.add_argument('--user', dest='user', action='store',
                        required=True, help='The Rackspace user')
    parser.add_argument('--api-key', dest='api_key', action='store',
                        required=True, help='The Rackspace API key')
    parser.add_argument('--auth-uri', dest='auth_uri', action='store',
                        default='https://identity.api.rackspacecloud.com/v2.0/tokens',
                        help='The Rackspace Identity token endpoint')
    args = parser.parse_args()
    main()
|
var express = require("express");
var router = express.Router();
var bagel = require("../models/bagel.js");
// GET / — fetch every bagel row and render the index view with it.
router.get("/", function(req, res) {
  bagel.selectAll(function(rows) {
    var viewData = { bagels: rows };
    console.log(viewData);
    res.render("index", viewData);
  });
});
// POST /api/bagels — insert a new bagel and respond with its generated id.
router.post("/api/bagels", function(req, res) {
  bagel.insertOne("bagel_name", [req.body.bagel_name], function(result) {
    // Send back the ID of the newly inserted bagel.
    res.json({ id: result.insertId });
  });
});
// PUT /api/bagels/:id — mark a bagel as devoured (or not).
router.put("/api/bagels/:id", function(req, res) {
  // SECURITY: the model interpolates this condition string into raw SQL,
  // so :id must be validated as a strict integer to prevent SQL injection
  // via the URL (e.g. "1 OR 1=1").
  var id = parseInt(req.params.id, 10);
  if (!Number.isInteger(id) || String(id) !== req.params.id) {
    return res.status(400).end();
  }
  var condition = "id = " + id;
  console.log("condition", condition);
  bagel.updateOne({
    devoured: req.body.devoured
  }, condition, function(result) {
    if (result.changedRows == 0) {
      // If no rows were changed, then the ID must not exist, so 404
      return res.status(404).end();
    } else {
      res.status(200).end();
    }
  });
});
// Expose the router so the server entry point can mount these routes.
module.exports = router;
|
import array
import collections
import os
import hightime
import numpy
import pytest
import nidigital
# Two simulated PXI instruments that are combined into one multi-instrument session.
instruments = ['PXI1Slot2', 'PXI1Slot5']
# Directory holding the pin maps, patterns, and levels/timing sheets the tests load.
test_files_base_dir = os.path.join(os.path.dirname(__file__), 'test_files')
@pytest.fixture(scope='function')
def multi_instrument_session():
    """Yield a fresh simulated 6570 session spanning both instruments; closed on teardown."""
    with nidigital.Session(resource_name=','.join(instruments), options='Simulate=1, DriverSetup=Model:6570') as simulated_session:
        yield simulated_session
def test_close():
    """Accessing a property after close() must raise error -1074130544 (invalid session).

    Uses pytest.raises instead of the try/assert-False/except pattern, which is
    both more idiomatic and cannot accidentally swallow the failing assert.
    """
    session = nidigital.Session(resource_name=','.join(instruments), options='Simulate=1, DriverSetup=Model:6570')
    session.vil = 1
    session.close()
    with pytest.raises(nidigital.Error) as excinfo:
        session.vil = 1
    assert excinfo.value.code == -1074130544
def test_reset(multi_instrument_session):
    """reset() returns selected_function to its DISCONNECT default."""
    multi_instrument_session.selected_function = nidigital.SelectedFunction.PPMU
    assert multi_instrument_session.selected_function == nidigital.SelectedFunction.PPMU
    multi_instrument_session.reset()
    assert multi_instrument_session.selected_function == nidigital.SelectedFunction.DISCONNECT
def test_reset_device(multi_instrument_session):
    """reset_device() also returns selected_function to its DISCONNECT default."""
    multi_instrument_session.selected_function = nidigital.SelectedFunction.PPMU
    assert multi_instrument_session.selected_function == nidigital.SelectedFunction.PPMU
    multi_instrument_session.reset_device()
    assert multi_instrument_session.selected_function == nidigital.SelectedFunction.DISCONNECT
def test_self_test(multi_instrument_session):
    """self_test() completes without raising on a simulated session."""
    multi_instrument_session.self_test()
def test_get_error(multi_instrument_session):
    """Writing a read-only property raises -1074135027 with a helpful description.

    Rewritten with pytest.raises in place of try/assert-False/except, and the
    `find(...) != -1` check replaced with the idiomatic `in` operator.
    """
    with pytest.raises(nidigital.Error) as excinfo:
        multi_instrument_session.supported_instrument_models = ''
    assert excinfo.value.code == -1074135027
    assert 'Attribute is read-only.' in excinfo.value.description
def test_self_calibrate(multi_instrument_session):
    """self_calibrate() completes without raising on a simulated session."""
    multi_instrument_session.self_calibrate()
def test_channels_rep_cap(multi_instrument_session):
    """vil written through the channels rep cap reads back per channel; others keep the session default."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.vil = 1
    edge_channels = session.get_channel_names(indices=[0, 63])
    session.channels[edge_channels].vil = 2
    assert session.pins[edge_channels].vil == pytest.approx(2, abs=1e-3)
    untouched_channel = session.get_channel_names(indices=1)
    assert session.pins[untouched_channel].vil == pytest.approx(1, abs=1e-3)
def test_sites_rep_cap(multi_instrument_session):
    """disable_sites() through the sites rep cap disables exactly the listed sites."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    for site in (0, 1):
        assert session.sites[site].is_site_enabled()
    session.sites[0, 1].disable_sites()
    for site in (0, 1):
        assert not session.sites[site].is_site_enabled()
def test_pins_rep_cap(multi_instrument_session):
    """Exercise the pins rep cap for properties and for channel_list/pin_list methods."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    # Channel-based properties: override vil on the DUT pins only.
    session.vil = 1
    session.pins['PinA', 'PinB', 'PinC'].vil = 2
    assert session.pins['DutPins'].vil == pytest.approx(2, abs=1e-3)
    assert session.pins['SysPins'].vil == pytest.approx(1, abs=1e-3)
    # Methods that accept a channel_list parameter.
    pin_states = session.pins['PinA', 'PinB'].read_static()
    assert len(pin_states) == 4  # 2 sites per pin
    # Methods that accept a pin_list parameter.
    session.create_time_set('t0')
    session.pins['PinA', 'PinB'].configure_time_set_drive_format(
        time_set_name='t0',
        drive_format=nidigital.DriveFormat.RL)
    actual_format = session.pins['PinA', 'PinB'].get_time_set_drive_format(time_set_name='t0')
    assert actual_format == nidigital.DriveFormat.RL
def test_chained_sites_pins_rep_cap(multi_instrument_session):
    """Chaining sites[...] with pins[...] targets only those site/pin intersections."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.vil = 1
    session.sites[0, 1].pins['PinA', 'PinB', 'PinC'].vil = 2
    for site in (0, 1):
        assert session.sites[site].pins['DutPins'].vil == pytest.approx(2, abs=1e-3)
def test_instruments_rep_cap(multi_instrument_session):
    """Per-instrument timing delay is independently settable; identity is readable."""
    session = multi_instrument_session
    session.timing_absolute_delay_enabled = True
    positive_delay = hightime.timedelta(microseconds=5e-3)
    negative_delay = hightime.timedelta(microseconds=-5e-3)
    session.instruments[instruments[0]].timing_absolute_delay = positive_delay
    session.instruments[instruments[1]].timing_absolute_delay = negative_delay
    assert session.instruments[instruments[0]].timing_absolute_delay == positive_delay
    assert session.instruments[instruments[1]].timing_absolute_delay == negative_delay
    # Simulated devices report placeholder identity values.
    for name in instruments:
        assert session.instruments[name].serial_number == '0'
    for name in instruments:
        assert session.instruments[name].instrument_firmware_revision == '0.0.0d0'
def test_pattern_opcode_events_rep_cap(multi_instrument_session):
    """Exported opcode-event terminal starts empty and round-trips an assigned value."""
    session = multi_instrument_session
    event = session.pattern_opcode_events['patternOpcodeEvent3']
    assert event.exported_pattern_opcode_event_output_terminal == ''
    terminal_name = '/Dev1/PXI_Trig0'
    event.exported_pattern_opcode_event_output_terminal = terminal_name
    assert event.exported_pattern_opcode_event_output_terminal == terminal_name
def test_conditional_jump_triggers_rep_cap(multi_instrument_session):
    """Conditional-jump trigger type defaults to NONE and round-trips DIGITAL_EDGE."""
    session = multi_instrument_session
    trigger = session.conditional_jump_triggers['conditionalJumpTrigger3']
    assert trigger.conditional_jump_trigger_type == nidigital.TriggerType.NONE
    trigger.conditional_jump_trigger_type = nidigital.TriggerType.DIGITAL_EDGE
    assert trigger.conditional_jump_trigger_type == nidigital.TriggerType.DIGITAL_EDGE
def test_property_boolean(multi_instrument_session):
    """Round-trip a boolean channel property."""
    session = multi_instrument_session
    target_channel = session.get_channel_names(indices=42)
    session.channels[target_channel].ppmu_allow_extended_voltage_range = True
    assert session.channels[target_channel].ppmu_allow_extended_voltage_range is True
def test_property_int32(multi_instrument_session):
    """Round-trip an enum-valued (int32-backed) channel property."""
    session = multi_instrument_session
    target_channel = session.get_channel_names(indices=42)
    session.channels[target_channel].termination_mode = nidigital.TerminationMode.HIGH_Z
    assert session.channels[target_channel].termination_mode == nidigital.TerminationMode.HIGH_Z
def test_property_int64(multi_instrument_session):
    """Round-trip an int64-backed session property."""
    session = multi_instrument_session
    session.cycle_number_history_ram_trigger_cycle_number = 42
    assert session.cycle_number_history_ram_trigger_cycle_number == 42
def test_property_real64(multi_instrument_session):
    """Round-trip a real64-backed channel property (approximate compare for floats)."""
    session = multi_instrument_session
    target_channel = session.get_channel_names(indices=42)
    session.channels[target_channel].ppmu_voltage_level = 4
    assert session.channels[target_channel].ppmu_voltage_level == pytest.approx(4, rel=1e-3)
def test_property_string(multi_instrument_session):
    """Round-trip a string-valued session property."""
    session = multi_instrument_session
    session.start_label = 'foo'
    assert session.start_label == 'foo'
def test_get_channel_names(multi_instrument_session):
    """indices accepts range strings, ints, tuples, ranges, and slices interchangeably."""
    expected = ['{0}/{1}'.format(instruments[0], i) for i in range(12)]
    # Sanity-check a few input flavors only; the converter unit tests are exhaustive.
    mixed_indices = ['0-1, 2, 3:4', 5, (6, 7), range(8, 10), slice(10, 12)]
    assert multi_instrument_session.get_channel_names(indices=mixed_indices) == expected
def test_tdr_all_channels(multi_instrument_session):
    """TDR over all channels; applied offsets must read back per channel."""
    session = multi_instrument_session
    measured_offsets = session.tdr(apply_offsets=False)
    assert len(measured_offsets) == session.channel_count
    session.apply_tdr_offsets(measured_offsets)
    all_channels = session.get_channel_names(range(0, session.channel_count))
    readback_offsets = [session.channels[name].tdr_offset for name in all_channels]
    assert readback_offsets == measured_offsets
def test_tdr_some_channels(multi_instrument_session):
    """TDR restricted to a channel subset; applied offsets must read back on that subset."""
    session = multi_instrument_session
    subset = session.get_channel_names([63, 0, 49, 24])
    measured_offsets = session.channels[subset].tdr(apply_offsets=False)
    assert len(measured_offsets) == len(subset)
    session.channels[subset].apply_tdr_offsets(measured_offsets)
    readback_offsets = [session.channels[name].tdr_offset for name in subset]
    assert readback_offsets == measured_offsets
def test_burst_pattern_burst_only(multi_instrument_session):
    """burst_pattern with wait_until_done=False returns no pass/fail data."""
    folder = 'simple_pattern'
    configure_session(multi_instrument_session, folder)
    multi_instrument_session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    outcome = multi_instrument_session.burst_pattern(start_label='new_pattern', wait_until_done=False)
    assert outcome is None
def test_burst_pattern_pass_fail(multi_instrument_session):
    """burst_pattern with wait_until_done=True returns per-site pass/fail results."""
    folder = 'simple_pattern'
    configure_session(multi_instrument_session, folder)
    multi_instrument_session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    outcome = multi_instrument_session.burst_pattern(start_label='new_pattern', wait_until_done=True)
    assert outcome == {0: True, 1: True, 2: True, 3: True}
def test_source_waveform_parallel_broadcast(multi_instrument_session):
    """Source-waveform methods with parallel sourcing and broadcast data mapping.

    Covers create_source_waveform_parallel and write_source_waveform_broadcast.
    """
    session = multi_instrument_session
    test_name = test_source_waveform_parallel_broadcast.__name__
    configure_session(session, test_name)
    session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    session.pins['LowPins'].create_source_waveform_parallel(
        waveform_name='src_wfm',
        data_mapping=nidigital.SourceDataMapping.BROADCAST)
    session.write_source_waveform_broadcast(
        waveform_name='src_wfm',
        waveform_data=list(range(4)))
    pass_fail = session.burst_pattern(start_label='new_pattern')
    assert pass_fail == {0: True, 1: True}
def configure_session(session, test_name):
    """Load the pin map plus specs/levels/timing sheets for *test_name* and apply them."""
    def path(file_name):
        # All assets for a test live under its own subdirectory.
        return get_test_file_path(test_name, file_name)
    session.load_pin_map(path('pin_map.pinmap'))
    session.load_specifications_levels_and_timing(
        specifications_file_paths=path('specifications.specs'),
        levels_file_paths=path('pin_levels.digilevels'),
        timing_file_paths=path('timing.digitiming'))
    session.apply_levels_and_timing(levels_sheet='pin_levels', timing_sheet='timing')
def get_test_file_path(test_name, file_name):
    """Return the path of *file_name* inside the test-specific files directory."""
    parts = (test_files_base_dir, test_name, file_name)
    return os.path.join(*parts)
@pytest.fixture(params=[array.array, numpy.array, list])
def source_waveform_type(request):
    """Parametrize tests over the supported waveform container types."""
    return request.param
def test_source_waveform_parallel_site_unique(multi_instrument_session, source_waveform_type):
    '''Test methods for using source waveform with parallel sourcing and site-unique data mapping.
    - create_source_waveform_parallel
    - write_source_waveform_site_unique
    '''
    test_name = test_source_waveform_parallel_site_unique.__name__
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    num_samples = 256
    # The pattern reads the sample count from sequencer register 0.
    multi_instrument_session.write_sequencer_register(reg=nidigital.SequencerRegister.REGISTER0, value=num_samples)
    multi_instrument_session.pins['LowPins'].create_source_waveform_parallel(
        waveform_name='src_wfm',
        data_mapping=nidigital.SourceDataMapping.SITE_UNIQUE)
    # Build per-site data (ascending for site 1, descending for site 0) in the
    # parametrized container type.
    if source_waveform_type == array.array:
        source_waveform = {
            1: array.array('L', [i for i in range(num_samples)]),
            0: array.array('L', [i for i in reversed(range(num_samples))])}
    elif source_waveform_type == numpy.array:
        source_waveform = {
            1: numpy.array([i for i in range(num_samples)], dtype=numpy.uint32),
            0: numpy.array([i for i in reversed(range(num_samples))], dtype=numpy.uint32)}
    elif source_waveform_type == list:
        source_waveform = {
            1: [i for i in range(num_samples)],
            0: [i for i in reversed(range(num_samples))]}
    else:
        assert False, "Invalid source waveform data type: {}".format(source_waveform_type)
    multi_instrument_session.write_source_waveform_site_unique(
        waveform_name='src_wfm',
        waveform_data=source_waveform)
    multi_instrument_session.pins['HighPins'].create_capture_waveform_parallel(waveform_name='capt_wfm')
    multi_instrument_session.burst_pattern(start_label='new_pattern')
    # Pattern burst is configured to fetch num_samples samples
    fetched_waveforms = multi_instrument_session.fetch_capture_waveform(
        waveform_name='capt_wfm',
        samples_to_read=num_samples)
    assert sorted(fetched_waveforms.keys()) == sorted([0, 1])
    assert all(len(fetched_waveforms[site]) == num_samples for site in fetched_waveforms)
@pytest.fixture(params=[tuple, int, str])
def source_waveform_wrong_type(request):
    """Parametrize tests over container types write_source_waveform_site_unique rejects."""
    return request.param
def test_source_waveform_parallel_site_unique_wrong_type(multi_instrument_session, source_waveform_wrong_type):
    '''write_source_waveform_site_unique must raise TypeError for unsupported containers.
    - create_source_waveform_parallel
    - write_source_waveform_site_unique
    '''
    # Intentionally reuses the asset folder of the site-unique happy-path test:
    # the pin map/pattern are identical, only the waveform payload differs.
    test_name = test_source_waveform_parallel_site_unique.__name__
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    num_samples = 256
    multi_instrument_session.write_sequencer_register(reg=nidigital.SequencerRegister.REGISTER0, value=num_samples)
    multi_instrument_session.pins['LowPins'].create_source_waveform_parallel(
        waveform_name='src_wfm',
        data_mapping=nidigital.SourceDataMapping.SITE_UNIQUE)
    if source_waveform_wrong_type == tuple:
        source_waveform = ([i for i in range(num_samples)], [i for i in reversed(range(num_samples))])
    elif source_waveform_wrong_type == int:
        source_waveform = num_samples
    elif source_waveform_wrong_type == str:
        source_waveform = {
            str(1): [str(i) for i in range(num_samples)],
            str(0): [str(i) for i in reversed(range(num_samples))]}
    else:
        # Bug fix: this previously referenced the undefined name `source_waveform_type`,
        # which would raise NameError here instead of the intended assertion message.
        assert False, "Invalid source waveform data type: {}".format(source_waveform_wrong_type)
    with pytest.raises(TypeError):
        multi_instrument_session.write_source_waveform_site_unique(
            waveform_name='src_wfm',
            waveform_data=source_waveform)
def test_fetch_capture_waveform_parallel(multi_instrument_session):
    '''Test methods for using capture waveform with parallel acquisition.
    - create_capture_waveform_parallel
    - fetch_capture_waveform
    '''
    test_name = test_fetch_capture_waveform_parallel.__name__
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    num_samples = 256
    # The pattern reads the sample count from sequencer register 0.
    multi_instrument_session.write_sequencer_register(reg=nidigital.SequencerRegister.REGISTER0, value=num_samples)
    multi_instrument_session.pins['LowPins'].create_source_waveform_parallel(
        waveform_name='src_wfm',
        data_mapping=nidigital.SourceDataMapping.BROADCAST)
    source_waveform = [i for i in range(num_samples)]
    multi_instrument_session.write_source_waveform_broadcast(waveform_name='src_wfm', waveform_data=source_waveform)
    multi_instrument_session.pins['HighPins'].create_capture_waveform_parallel(waveform_name='capt_wfm')
    multi_instrument_session.burst_pattern(start_label='new_pattern')
    # Pattern burst is configured to fetch num_samples samples; pull them out in
    # small chunks to exercise partial fetches and accumulate per site.
    samples_per_fetch = 8
    waveforms = collections.defaultdict(list)
    for i in range(num_samples // samples_per_fetch):
        fetched_waveform = multi_instrument_session.sites[1, 0].fetch_capture_waveform(
            waveform_name='capt_wfm',
            samples_to_read=samples_per_fetch)
        for site in fetched_waveform:
            waveforms[site] += fetched_waveform[site]
    assert sorted(waveforms.keys()) == sorted([0, 1])
    assert all(len(waveforms[site]) == num_samples for site in waveforms)
    # Burst on subset of sites and verify fetch_capture_waveform() only returns that site.
    multi_instrument_session.sites[1].burst_pattern(start_label='new_pattern')
    fetched_waveform = multi_instrument_session.fetch_capture_waveform(
        waveform_name='capt_wfm',
        samples_to_read=num_samples)
    assert len(fetched_waveform) == 1
    fetched_site = next(iter(fetched_waveform))
    assert fetched_site == 1
    assert len(fetched_waveform[fetched_site]) == num_samples
def test_get_pin_results_pin_information(multi_instrument_session):
    """Channels map back to their pin names and site numbers (also covers load_pin_map)."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    queried_channels = [instruments[1] + '/0', instruments[0] + '/1', instruments[1] + '/11']
    pin_info = session.channels[queried_channels].get_pin_results_pin_information()
    # An unmapped channel reports an empty pin name and site -1.
    assert [info.pin_name for info in pin_info] == ['PinA', 'PinB', '']
    assert [info.site_number for info in pin_info] == [1, 0, -1]
    assert [info.channel_name for info in pin_info] == queried_channels
def test_history_ram_cycle_information_representation():
    """repr() of HistoryRAMCycleInformation must be eval()-able back to an equal object."""
    cycle_info = nidigital.HistoryRAMCycleInformation(
        pattern_name='pat',
        time_set_name='t0',
        vector_number=42,
        cycle_number=999,
        scan_cycle_number=13,
        expected_pin_states=[[nidigital.PinState.D, nidigital.PinState.D], [nidigital.PinState.V, nidigital.PinState.V]],
        actual_pin_states=[[nidigital.PinState.PIN_STATE_NOT_ACQUIRED, nidigital.PinState.PIN_STATE_NOT_ACQUIRED],
                           [nidigital.PinState.NOT_A_PIN_STATE, nidigital.PinState.NOT_A_PIN_STATE]],
        per_pin_pass_fail=[[True, True], [False, False]])
    # Round-trip through repr; eval resolves names against this module's imports.
    recreated_cycle_info = eval(repr(cycle_info))
    assert str(recreated_cycle_info) == str(cycle_info)
def test_history_ram_cycle_information_string():
    """str() of HistoryRAMCycleInformation matches the documented fixed layout."""
    cycle_info = nidigital.HistoryRAMCycleInformation(
        pattern_name='pat',
        time_set_name='t0',
        vector_number=42,
        cycle_number=999,
        scan_cycle_number=13,
        expected_pin_states=[[nidigital.PinState.D, nidigital.PinState.V], [nidigital.PinState.V, nidigital.PinState.D]],
        actual_pin_states=[[nidigital.PinState.PIN_STATE_NOT_ACQUIRED, nidigital.PinState.PIN_STATE_NOT_ACQUIRED], [nidigital.PinState.ZERO, nidigital.PinState.ONE]],
        per_pin_pass_fail=[[True, True], [False, False]])
    print(cycle_info)
    # NOTE: the expected text is column-aligned and must stay byte-exact,
    # including the trailing newline before the closing quotes.
    expected_string = '''Pattern Name : pat
Time Set Name : t0
Vector Number : 42
Cycle Number : 999
Scan Cycle Number : 13
Expected Pin States : [[D, V], [V, D]]
Actual Pin States : [[PIN_STATE_NOT_ACQUIRED, PIN_STATE_NOT_ACQUIRED], [ZERO, ONE]]
Per Pin Pass Fail : [[True, True], [False, False]]
'''
    assert str(cycle_info) == expected_string
def test_fetch_history_ram_cycle_information_without_site(multi_instrument_session):
    """Fetching HRAM info without a sites rep cap is rejected with ValueError."""
    configure_for_history_ram_test(multi_instrument_session)
    with pytest.raises(ValueError, match='Site number on which to retrieve pattern information must be specified via sites repeated capability.'):
        multi_instrument_session.fetch_history_ram_cycle_information(position=-1, samples_to_read=-1)
def test_fetch_history_ram_cycle_information_position_negative(multi_instrument_session):
    """A negative fetch position is rejected with ValueError."""
    configure_for_history_ram_test(multi_instrument_session)
    with pytest.raises(ValueError, match='position should be greater than or equal to 0.'):
        multi_instrument_session.sites[1].fetch_history_ram_cycle_information(position=-1, samples_to_read=-1)
def configure_for_history_ram_test(session):
    """Configure HRAM to capture from the first failure onward, then burst site 1."""
    test_files_folder = 'test_fetch_history_ram_cycle_information'
    configure_session(session, test_files_folder)
    session.load_pattern(get_test_file_path(test_files_folder, 'pattern.digipat'))
    # Trigger on the first failing cycle and keep every cycle from then on.
    session.history_ram_trigger_type = nidigital.HistoryRAMTriggerType.FIRST_FAILURE
    session.history_ram_cycles_to_acquire = nidigital.HistoryRAMCyclesToAcquire.ALL
    session.history_ram_pretrigger_samples = 0
    session.history_ram_number_of_samples_is_finite = True
    session.sites[1].burst_pattern(start_label='new_pattern')
@pytest.mark.skip(reason="TODO(sbethur): Enable running on simulated session. GitHub issue #1273")
def test_fetch_history_ram_cycle_information_position_out_of_bound(multi_instrument_session):
    """A position past the last captured sample is rejected with ValueError."""
    configure_for_history_ram_test(multi_instrument_session)
    with pytest.raises(ValueError, match='position: Specified value = 8, Maximum value = 6.'):
        multi_instrument_session.sites[1].fetch_history_ram_cycle_information(position=8, samples_to_read=-1)
@pytest.mark.skip(reason="TODO(sbethur): Enable running on simulated session. GitHub issue #1273")
def test_fetch_history_ram_cycle_information_position_last(multi_instrument_session):
    """Fetching from the final valid position returns exactly one cycle record."""
    configure_for_history_ram_test(multi_instrument_session)
    history_ram_cycle_info = multi_instrument_session.sites[1].fetch_history_ram_cycle_information(
        position=6,
        samples_to_read=-1)
    assert len(history_ram_cycle_info) == 1
    assert history_ram_cycle_info[0].vector_number == 9
    assert history_ram_cycle_info[0].cycle_number == 11
def test_fetch_history_ram_cycle_information_is_finite_invalid(multi_instrument_session):
    """samples_to_read=-1 is invalid while HRAM is in continuous acquisition mode."""
    configure_for_history_ram_test(multi_instrument_session)
    multi_instrument_session.history_ram_number_of_samples_is_finite = False
    expected_error_description = (
        'Specifying -1 to fetch all History RAM samples is not supported when the digital pattern instrument '
        'is configured for continuous History RAM acquisition. You must specify an exact number of samples to fetch.')
    with pytest.raises(RuntimeError, match=expected_error_description):
        multi_instrument_session.sites[1].fetch_history_ram_cycle_information(position=0, samples_to_read=-1)
@pytest.mark.skip(reason="TODO(sbethur): Enable running on simulated session. GitHub issue #1273")
def test_fetch_history_ram_cycle_information_samples_to_read_too_much(multi_instrument_session):
    """Requesting more samples than remain after *position* raises ValueError."""
    configure_for_history_ram_test(multi_instrument_session)
    assert multi_instrument_session.sites[1].get_history_ram_sample_count() == 7
    multi_instrument_session.sites[1].fetch_history_ram_cycle_information(position=0, samples_to_read=3)
    expected_error_description = (
        'position: Specified value = 3, samples_to_read: Specified value = 5; Samples available = 4.')
    with pytest.raises(ValueError, match=expected_error_description):
        multi_instrument_session.sites[1].fetch_history_ram_cycle_information(position=3, samples_to_read=5)
def test_fetch_history_ram_cycle_information_samples_to_read_negative(multi_instrument_session):
    """samples_to_read below -1 is rejected with ValueError (-1 means 'all')."""
    configure_for_history_ram_test(multi_instrument_session)
    with pytest.raises(ValueError, match='samples_to_read should be greater than or equal to -1.'):
        multi_instrument_session.sites[1].fetch_history_ram_cycle_information(position=0, samples_to_read=-2)
def test_fetch_history_ram_cycle_information_samples_to_read_zero(multi_instrument_session):
    """samples_to_read=0 is valid and returns an empty result."""
    configure_for_history_ram_test(multi_instrument_session)
    history_ram_cycle_info = multi_instrument_session.sites[1].fetch_history_ram_cycle_information(
        position=0,
        samples_to_read=0)
    assert len(history_ram_cycle_info) == 0
@pytest.mark.skip(reason="TODO(sbethur): Enable running on simulated session. GitHub issue #1273")
def test_fetch_history_ram_cycle_information_samples_to_read_all(multi_instrument_session):
    """samples_to_read=-1 fetches all 7 captured cycles with correct per-field values."""
    configure_for_history_ram_test(multi_instrument_session)
    history_ram_cycle_info = multi_instrument_session.sites[1].fetch_history_ram_cycle_information(
        position=0,
        samples_to_read=-1)
    assert len(history_ram_cycle_info) == 7
    assert all([i.pattern_name == 'new_pattern' for i in history_ram_cycle_info])
    time_set_names = [i.time_set_name for i in history_ram_cycle_info]
    assert time_set_names == ['t0', 'tScan', 'tScan', 't2X', 't2X', 't2X', 't0']
    vector_numbers = [i.vector_number for i in history_ram_cycle_info]
    assert vector_numbers == [5, 6, 6, 7, 7, 8, 9]
    cycle_numbers = [i.cycle_number for i in history_ram_cycle_info]
    assert cycle_numbers == list(range(5, 12))
    # -1 marks cycles that are not part of a scan.
    scan_cycle_numbers = [i.scan_cycle_number for i in history_ram_cycle_info]
    assert scan_cycle_numbers == [-1, 0, 1, -1, -1, -1, -1]
    pin_names = multi_instrument_session.get_pattern_pin_names('new_pattern')
    assert pin_names == ['LO' + str(i) for i in range(4)] + ['HI' + str(i) for i in range(4)]
    expected_pin_states = [i.expected_pin_states for i in history_ram_cycle_info]
    assert expected_pin_states == [
        [[nidigital.PinState.ZERO, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.ZERO, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.ZERO, nidigital.PinState.ONE, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.H]],
        [[nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.ONE, nidigital.PinState.ZERO, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.L]],
        [[nidigital.PinState.ONE, nidigital.PinState.ONE, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X], [nidigital.PinState.ZERO, nidigital.PinState.ZERO, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.ONE, nidigital.PinState.ONE, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X], [nidigital.PinState.ZERO, nidigital.PinState.ZERO, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.ZERO, nidigital.PinState.ONE, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X], [nidigital.PinState.ONE, nidigital.PinState.ZERO, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X]]
    ]
    # When the expected actual pin state is 'X', the driver may return anything,
    # so those positions are skipped during the comparison below.
    actual_pin_states = [i.actual_pin_states for i in history_ram_cycle_info]
    actual_pin_states_expected_by_test = [
        [[nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.H]],
        [[nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.L]],
        [[nidigital.PinState.H, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X], [nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.H, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X], [nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.L, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.L, nidigital.PinState.H, nidigital.PinState.X, nidigital.PinState.X], [nidigital.PinState.H, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.H, nidigital.PinState.L, nidigital.PinState.X, nidigital.PinState.X]],
        [[nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X, nidigital.PinState.X]]
    ]
    assert len(actual_pin_states) == len(actual_pin_states_expected_by_test)
    for vector_pin_states, vector_pin_states_expected_by_test in zip(actual_pin_states, actual_pin_states_expected_by_test):
        for cycle_pin_states, cycle_pin_states_expected_by_test in zip(vector_pin_states, vector_pin_states_expected_by_test):
            for pin_state, pin_state_expected_by_test in zip(cycle_pin_states, cycle_pin_states_expected_by_test):
                if pin_state_expected_by_test is not nidigital.PinState.X:
                    assert pin_state == pin_state_expected_by_test
    # Only the first cycle returned is expected to have failures
    per_pin_pass_fail = [i.per_pin_pass_fail for i in history_ram_cycle_info]
    assert per_pin_pass_fail == [
        [[True, False, True, True, False, True, True, True]],
        [[True, True, True, True, True, True, True, True]],
        [[True, True, True, True, True, True, True, True]],
        [[True, True, True, True, True, True, True, True], [True, True, True, True, True, True, True, True]],
        [[True, True, True, True, True, True, True, True], [True, True, True, True, True, True, True, True]],
        [[True, True, True, True, True, True, True, True], [True, True, True, True, True, True, True, True]],
        [[True, True, True, True, True, True, True, True]],
    ]
def test_fetch_history_ram_cycle_information_no_failures(multi_instrument_session):
    """With no failures captured, HRAM fetches return empty for both -1 and 0 sample counts."""
    test_name = 'simple_pattern'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    multi_instrument_session.burst_pattern(start_label='new_pattern')
    history_ram_cycle_info = multi_instrument_session.sites[0].fetch_history_ram_cycle_information(
        position=0,
        samples_to_read=-1)
    assert len(history_ram_cycle_info) == 0
    history_ram_cycle_info = multi_instrument_session.sites[0].fetch_history_ram_cycle_information(
        position=0,
        samples_to_read=0)
    assert len(history_ram_cycle_info) == 0
def test_get_pattern_pin_names(multi_instrument_session):
    """Pattern pin names follow the pin-map ordering (also covers load_pattern)."""
    session = multi_instrument_session
    folder = 'simple_pattern'
    configure_session(session, folder)
    session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    names = session.get_pattern_pin_names(start_label='new_pattern')
    assert names == ['LO' + str(i) for i in range(4)] + ['HI' + str(i) for i in range(4)]
def test_get_site_pass_fail(multi_instrument_session):
    """All sites pass after the burst; a rep-cap subset preserves requested ordering."""
    session = multi_instrument_session
    folder = 'simple_pattern'
    configure_session(session, folder)
    session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    session.burst_pattern(start_label='new_pattern')
    assert session.get_site_pass_fail() == {0: True, 1: True, 2: True, 3: True}
    assert session.sites[3, 0].get_site_pass_fail() == {3: True, 0: True}
def test_get_fail_count(multi_instrument_session):
    """A clean burst reports zero failures overall and for a pin subset."""
    session = multi_instrument_session
    folder = 'simple_pattern'
    configure_session(session, folder)
    session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    session.burst_pattern(start_label='new_pattern')
    assert session.get_fail_count() == [0] * session.channel_count
    subset_counts = session.pins['site0/LO0', 'site0/HI1', 'site2/HI3'].get_fail_count()
    assert subset_counts == [0] * 3
def test_ppmu_measure(multi_instrument_session):
    """PPMU voltage measurement returns one reading per selected pin."""
    configure_session(multi_instrument_session, 'simple_pattern')
    readings = multi_instrument_session.pins['site0/LO0', 'site1/HI0'].ppmu_measure(
        nidigital.PPMUMeasurementType.VOLTAGE)
    # Two pins selected -> two measurements.
    assert len(readings) == 2
def test_ppmu_source(multi_instrument_session):
    """ppmu_source runs without error on a pin subset."""
    configure_session(multi_instrument_session, 'simple_pattern')
    multi_instrument_session.pins['site0/LO0', 'site1/HI0'].ppmu_source()
def test_read_static(multi_instrument_session):
    """read_static reports the static state of each selected pin."""
    configure_session(multi_instrument_session, 'simple_pattern')
    states = multi_instrument_session.pins['site0/LO0', 'site1/HI0'].read_static()
    # Both pins read low on the simulated device.
    assert states == [nidigital.PinState.L, nidigital.PinState.L]
def test_write_static(multi_instrument_session):
    """write_static drives a static one on the selected pins without error."""
    configure_session(multi_instrument_session, 'simple_pattern')
    pins = multi_instrument_session.pins['site0/LO0', 'site1/HI0']
    pins.write_static(nidigital.WriteStaticPinState.ONE)
def test_read_sequencer_flag(multi_instrument_session):
    """A freshly opened session has its sequencer flags cleared."""
    assert multi_instrument_session.read_sequencer_flag(nidigital.SequencerFlag.FLAG1) is False
def test_write_sequencer_flag(multi_instrument_session):
    """write_sequencer_flag sets a flag without raising."""
    flag = nidigital.SequencerFlag.FLAG2
    multi_instrument_session.write_sequencer_flag(flag, True)
def test_read_sequencer_register(multi_instrument_session):
    """A freshly opened session has its sequencer registers zeroed."""
    value = multi_instrument_session.read_sequencer_register(nidigital.SequencerRegister.REGISTER10)
    assert value == 0
def test_write_sequencer_register(multi_instrument_session):
    """write_sequencer_register accepts the maximum 16-bit value without raising."""
    register = nidigital.SequencerRegister.REGISTER15
    multi_instrument_session.write_sequencer_register(register, 0xFFFF)
def test_configure_voltage_levels(multi_instrument_session):
    """Check default voltage levels, then configure new levels on a pin subset and read them back."""
    # Defaults observed before any explicit configuration.
    assert multi_instrument_session.vil == pytest.approx(0.0, abs=1e-4)
    assert multi_instrument_session.vih == pytest.approx(3.3, rel=1e-3)
    assert multi_instrument_session.vol == pytest.approx(1.6, rel=1e-3)
    assert multi_instrument_session.voh == pytest.approx(1.7, rel=1e-3)
    assert multi_instrument_session.vterm == pytest.approx(2.0, rel=1e-3)
    multi_instrument_session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    multi_instrument_session.pins['site0/PinA', 'site1/PinC'].configure_voltage_levels(
        vil=1.0,
        vih=2.0,
        vol=3.0,
        voh=4.0,
        vterm=5.0)
    # Each configured level should read back on the same pin subset.
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].vil == pytest.approx(1.0, rel=1e-3)
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].vih == pytest.approx(2.0, rel=1e-3)
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].vol == pytest.approx(3.0, rel=1e-3)
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].voh == pytest.approx(4.0, rel=1e-3)
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].vterm == pytest.approx(5.0, rel=1e-3)
def test_configure_active_load_levels(multi_instrument_session):
    """Check default active-load levels, then configure new ones on a pin subset and read them back."""
    # Defaults observed before any explicit configuration.
    assert multi_instrument_session.active_load_iol == pytest.approx(0.0015, rel=1e-3)
    assert multi_instrument_session.active_load_ioh == pytest.approx(-0.0015, rel=1e-3)
    assert multi_instrument_session.active_load_vcom == pytest.approx(2.0, rel=1e-3)
    multi_instrument_session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    multi_instrument_session.pins['site0/PinA', 'site1/PinC'].configure_active_load_levels(
        iol=0.024,
        ioh=-0.024,
        vcom=3.0)
    # Each configured level should read back on the same pin subset.
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].active_load_iol == pytest.approx(0.024, rel=1e-3)
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].active_load_ioh == pytest.approx(-0.024, rel=1e-3)
    assert multi_instrument_session.pins['site0/PinA', 'site1/PinC'].active_load_vcom == pytest.approx(3.0, rel=1e-3)
def test_clock_generator_abort(multi_instrument_session):
    """clock_generator_abort completes without error on a pin subset."""
    multi_instrument_session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    multi_instrument_session.pins['site0/PinA', 'site1/PinC'].clock_generator_abort()
def test_clock_generator_generate_clock(multi_instrument_session):
    """clock_generator_generate_clock starts a 1 MHz clock without error."""
    multi_instrument_session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    pins = multi_instrument_session.pins['site0/PinA', 'site1/PinC']
    # 1 MHz, second positional flag True as in the original call.
    pins.clock_generator_generate_clock(1e6, True)
def test_frequency_counter_measure_frequency(multi_instrument_session):
    """Measure frequency on idle pins; the simulated device reports 0 Hz."""
    multi_instrument_session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    multi_instrument_session.pins['site0/PinA', 'site1/PinC'].selected_function = nidigital.SelectedFunction.DIGITAL
    # Short measurement window keeps the test fast.
    multi_instrument_session.pins['site0/PinA', 'site1/PinC'].frequency_counter_measurement_time = hightime.timedelta(milliseconds=5)
    frequencies = multi_instrument_session.pins['site0/PinA', 'site1/PinC'].frequency_counter_measure_frequency()
    # One measurement per selected pin.
    assert frequencies == [0] * 2
def test_create_get_delete_time_sets(multi_instrument_session):
    """Exercise basic time set methods: create_time_set and delete_all_time_sets."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    # Create two independently named time sets, then remove them all at once.
    for name in ('time_set_abc', 'time_set_123'):
        session.create_time_set(name)
    session.delete_all_time_sets()
def test_configure_get_time_set_period(multi_instrument_session):
    """Exercise configure_time_set_period and get_time_set_period."""
    session = multi_instrument_session
    name = 'time_set_abc'
    new_period = hightime.timedelta(microseconds=10)
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    # A newly created time set starts with a 1 us period.
    assert session.get_time_set_period(name) == hightime.timedelta(microseconds=1)
    session.configure_time_set_period(name, new_period)
    assert session.get_time_set_period(name) == new_period
def test_configure_get_time_set_drive_format(multi_instrument_session):
    """Exercise configure_time_set_drive_format and get_time_set_drive_format."""
    session = multi_instrument_session
    name = 'time_set_abc'
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    pins = session.pins['site0/PinA', 'site1/PinC']
    # A new time set starts in non-return (NR) format.
    assert pins.get_time_set_drive_format(name) == nidigital.DriveFormat.NR
    pins.configure_time_set_drive_format(name, nidigital.DriveFormat.SBC)
    assert pins.get_time_set_drive_format(name) == nidigital.DriveFormat.SBC
def test_configure_get_time_set_edge(multi_instrument_session):
    """Exercise configure_time_set_edge and get_time_set_edge for one edge."""
    session = multi_instrument_session
    name = 'time_set_abc'
    period = hightime.timedelta(microseconds=10)
    drive_on = period * 0.5
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    session.configure_time_set_period(name, period)
    pins = session.pins['site0/PinA', 'site1/PinC']
    # The drive-on edge starts at t=0 for a new time set.
    assert pins.get_time_set_edge(name, nidigital.TimeSetEdgeType.DRIVE_ON) == hightime.timedelta(seconds=0)
    pins.configure_time_set_edge(name, nidigital.TimeSetEdgeType.DRIVE_ON, drive_on)
    assert pins.get_time_set_edge(name, nidigital.TimeSetEdgeType.DRIVE_ON) == drive_on
def test_configure_time_set_drive_edges(multi_instrument_session):
    """configure_time_set_drive_edges sets the drive format plus all four drive edges."""
    session = multi_instrument_session
    name = 'time_set_abc'
    period = hightime.timedelta(microseconds=10)
    # Edge placements as fractions of the time set period.
    edges = {
        nidigital.TimeSetEdgeType.DRIVE_ON: period * 0.1,
        nidigital.TimeSetEdgeType.DRIVE_DATA: period * 0.2,
        nidigital.TimeSetEdgeType.DRIVE_RETURN: period * 0.8,
        nidigital.TimeSetEdgeType.DRIVE_OFF: period * 0.9,
    }
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    session.configure_time_set_period(name, period)
    pins = session.pins['site0/PinA', 'site1/PinC']
    pins.configure_time_set_drive_edges(
        name,
        nidigital.DriveFormat.RL,
        edges[nidigital.TimeSetEdgeType.DRIVE_ON],
        edges[nidigital.TimeSetEdgeType.DRIVE_DATA],
        edges[nidigital.TimeSetEdgeType.DRIVE_RETURN],
        edges[nidigital.TimeSetEdgeType.DRIVE_OFF])
    assert pins.get_time_set_drive_format(name) == nidigital.DriveFormat.RL
    # Each programmed edge placement should read back exactly.
    for edge_type, expected in edges.items():
        assert pins.get_time_set_edge(name, edge_type) == expected
def test_configure_time_set_compare_edges_strobe(multi_instrument_session):
    """configure_time_set_compare_edges_strobe places the compare strobe edge."""
    session = multi_instrument_session
    name = 'time_set_abc'
    period = hightime.timedelta(microseconds=10)
    strobe = period * 0.5
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    session.configure_time_set_period(name, period)
    pins = session.pins['site0/PinA', 'site1/PinC']
    pins.configure_time_set_compare_edges_strobe(name, strobe)
    assert pins.get_time_set_edge(name, nidigital.TimeSetEdgeType.COMPARE_STROBE) == strobe
def test_configure_get_time_set_edge_multiplier(multi_instrument_session):
    """Exercise configure_time_set_edge_multiplier and get_time_set_edge_multiplier."""
    session = multi_instrument_session
    name = 'time_set_abc'
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    session.configure_time_set_period(name, hightime.timedelta(microseconds=10))
    pins = session.pins['site0/PinA', 'site1/PinC']
    # A new time set starts at a 1x edge multiplier.
    assert pins.get_time_set_edge_multiplier(name) == 1
    pins.configure_time_set_edge_multiplier(name, 2)
    assert pins.get_time_set_edge_multiplier(name) == 2
def test_configure_time_set_drive_edges2x(multi_instrument_session):
    """configure_time_set_drive_edges2x sets the format plus all six 2x drive edges."""
    session = multi_instrument_session
    name = 'time_set_abc'
    period = hightime.timedelta(microseconds=10)
    # Edge placements as fractions of the time set period.
    edges = {
        nidigital.TimeSetEdgeType.DRIVE_ON: period * 0.1,
        nidigital.TimeSetEdgeType.DRIVE_DATA: period * 0.2,
        nidigital.TimeSetEdgeType.DRIVE_RETURN: period * 0.5,
        nidigital.TimeSetEdgeType.DRIVE_DATA2: period * 0.7,
        nidigital.TimeSetEdgeType.DRIVE_RETURN2: period * 0.9,
        nidigital.TimeSetEdgeType.DRIVE_OFF: period * 0.9,
    }
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    session.configure_time_set_period(name, period)
    pins = session.pins['site0/PinA', 'site1/PinC']
    # 2x edges require a 2x edge multiplier on the time set.
    pins.configure_time_set_edge_multiplier(name, 2)
    pins.configure_time_set_drive_edges2x(
        name,
        nidigital.DriveFormat.RL,
        edges[nidigital.TimeSetEdgeType.DRIVE_ON],
        edges[nidigital.TimeSetEdgeType.DRIVE_DATA],
        edges[nidigital.TimeSetEdgeType.DRIVE_RETURN],
        edges[nidigital.TimeSetEdgeType.DRIVE_OFF],
        edges[nidigital.TimeSetEdgeType.DRIVE_DATA2],
        edges[nidigital.TimeSetEdgeType.DRIVE_RETURN2])
    assert pins.get_time_set_drive_format(name) == nidigital.DriveFormat.RL
    # Each programmed edge placement should read back exactly.
    for edge_type, expected in edges.items():
        assert pins.get_time_set_edge(name, edge_type) == expected
def test_configure_time_set_compare_edges_strobe2x(multi_instrument_session):
    """configure_time_set_compare_edges_strobe2x places both compare strobes."""
    session = multi_instrument_session
    name = 'time_set_abc'
    period = hightime.timedelta(microseconds=10)
    strobe = period * 0.4
    strobe2 = period * 0.8
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    session.create_time_set(name)
    session.configure_time_set_period(name, period)
    pins = session.pins['site0/PinA', 'site1/PinC']
    # The second strobe requires a 2x edge multiplier on the time set.
    pins.configure_time_set_edge_multiplier(name, 2)
    pins.configure_time_set_compare_edges_strobe2x(name, strobe, strobe2)
    assert pins.get_time_set_edge(name, nidigital.TimeSetEdgeType.COMPARE_STROBE) == strobe
    assert pins.get_time_set_edge(name, nidigital.TimeSetEdgeType.COMPARE_STROBE2) == strobe2
def test_enable_disable_sites_single(multi_instrument_session):
    """Exercise enable_sites/disable_sites/is_site_enabled for a single site."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    site = session.sites[1]
    # Sites start enabled, can be disabled, and can be re-enabled.
    assert site.is_site_enabled()
    site.disable_sites()
    assert not site.is_site_enabled()
    site.enable_sites()
    assert site.is_site_enabled()
def test_enable_disable_sites_multiple(multi_instrument_session):
    """Exercise enable_sites/disable_sites/is_site_enabled for site lists and the whole session."""
    session = multi_instrument_session
    session.load_pin_map(os.path.join(test_files_base_dir, "pin_map.pinmap"))
    for site in (0, 1):
        assert session.sites[site].is_site_enabled()
    # Multiple-site configuration.
    session.sites[0, 1].disable_sites()
    for site in (0, 1):
        assert not session.sites[site].is_site_enabled()
    session.sites[0, 1].enable_sites()
    for site in (0, 1):
        assert session.sites[site].is_site_enabled()
    # All-site configuration (no site qualifier).
    session.disable_sites()
    for site in (0, 1):
        assert not session.sites[site].is_site_enabled()
    session.enable_sites()
    for site in (0, 1):
        assert session.sites[site].is_site_enabled()
def test_load_get_unload_patterns(multi_instrument_session):
    """Exercise load_pattern and unload_all_patterns with two pattern files."""
    folder = 'multiple_patterns'
    multi_instrument_session.load_pin_map(get_test_file_path(folder, 'pin_map.pinmap'))
    for pattern_file in ('pattern_a.digipat', 'pattern_b.digipat'):
        multi_instrument_session.load_pattern(get_test_file_path(folder, pattern_file))
    multi_instrument_session.unload_all_patterns(unload_keep_alive_pattern=True)
def test_configure_pattern_burst_sites(multi_instrument_session):
    """Burst only a subset of sites and verify results are limited to the burst set.

    Also exercises initiate.
    """
    test_name = 'multiple_patterns'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern_b.digipat'))
    multi_instrument_session.start_label = 'second_pattern'
    multi_instrument_session.selected_function = nidigital.SelectedFunction.DIGITAL
    # Restrict the burst to sites 0, 2 and 3 (site 1 excluded).
    multi_instrument_session.sites[0, 2, 3].configure_pattern_burst_sites()
    multi_instrument_session.initiate()
    multi_instrument_session.wait_until_done(timeout=hightime.timedelta(seconds=5.0))
    # Site 1 was not burst, so it is omitted from the requested subset's results.
    result = multi_instrument_session.sites[0, 1, 3].get_site_pass_fail()
    assert result == {0: True, 3: True}
def test_commit(multi_instrument_session):
    """commit applies a pending attribute change and the value reads back unchanged."""
    trigger_cycle = 42
    multi_instrument_session.cycle_number_history_ram_trigger_cycle_number = trigger_cycle
    multi_instrument_session.commit()
    assert multi_instrument_session.cycle_number_history_ram_trigger_cycle_number == trigger_cycle
def test_initiate_context_manager_and_wait_until_done(multi_instrument_session):
    '''Test initiate's context manager and pattern completion methods.
    - with initiate
    - wait_until_done
    - is_done
    '''
    test_name = 'simple_pattern'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    multi_instrument_session.start_label = 'new_pattern'
    multi_instrument_session.selected_function = nidigital.SelectedFunction.DIGITAL
    # NOTE(review): initiate() presumably aborts the burst on context exit — confirm against driver docs.
    with multi_instrument_session.initiate():
        # note that wait_until_done will return immediately with simulated hardware
        multi_instrument_session.wait_until_done(timeout=hightime.timedelta(seconds=5.0))
        assert multi_instrument_session.is_done()
def test_abort(multi_instrument_session):
    """abort stops an initiated pattern burst without error."""
    session = multi_instrument_session
    folder = 'simple_pattern'
    configure_session(session, folder)
    session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    session.start_label = 'new_pattern'
    session.selected_function = nidigital.SelectedFunction.DIGITAL
    session.initiate()
    session.abort()
def test_abort_keep_alive(multi_instrument_session):
    """abort_keep_alive stops an initiated pattern burst without error."""
    session = multi_instrument_session
    folder = 'simple_pattern'
    configure_session(session, folder)
    session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    session.start_label = 'new_pattern'
    session.selected_function = nidigital.SelectedFunction.DIGITAL
    session.initiate()
    session.abort_keep_alive()
def test_create_source_waveform_serial(multi_instrument_session):
    """Create a serial source waveform, write data to it, and burst to confirm the configuration."""
    test_name = 'test_create_source_waveform_serial'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    multi_instrument_session.pins['LO0'].create_source_waveform_serial(
        waveform_name='src_wfm',
        data_mapping=nidigital.SourceDataMapping.BROADCAST,
        sample_width=2,
        bit_order=nidigital.BitOrder.LSB)
    # load and burst the waveform to confirm that configuration went okay
    multi_instrument_session.write_source_waveform_broadcast(
        waveform_name='src_wfm',
        waveform_data=[1, 2])
    pass_fail = multi_instrument_session.burst_pattern(start_label='new_pattern')
    assert pass_fail == {0: True, 1: True}
def test_create_source_waveform_from_file_tdms(multi_instrument_session):
    """create_source_waveform_from_file_tdms configures and writes waveform data in one call."""
    session = multi_instrument_session
    folder = 'test_source_waveform_parallel_broadcast'
    configure_session(session, folder)
    session.load_pattern(get_test_file_path(folder, 'pattern.digipat'))
    session.create_source_waveform_from_file_tdms(
        waveform_name='src_wfm',
        waveform_file_path=get_test_file_path(folder, 'source_waveform.tdms'),
        write_waveform_data=True)
    # Bursting succeeds only if the waveform was configured and loaded correctly.
    assert session.burst_pattern(start_label='new_pattern') == {0: True, 1: True}
def test_write_source_waveform_data_from_file_tdms(multi_instrument_session):
    """Create a source waveform without writing data, then write it from a TDMS file.

    The intermediate burst is expected to fail with a specific driver error
    because the waveform data has not yet been written to source memory.
    """
    test_name = 'test_source_waveform_parallel_broadcast'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    multi_instrument_session.create_source_waveform_from_file_tdms(
        waveform_name='src_wfm',
        waveform_file_path=get_test_file_path(test_name, 'source_waveform.tdms'),
        write_waveform_data=False)
    try: # confirm that the waveform is not yet loaded
        multi_instrument_session.burst_pattern(start_label='new_pattern')
        assert False
    except nidigital.Error as e:
        assert e.code == -1074118614
        assert e.description.find('The source waveform(s) used in the pattern(s) to be burst have not been written to source memory.'
                                  ' Ensure that you write source waveforms with niDigital Write Source Waveform.') != -1
    multi_instrument_session.write_source_waveform_data_from_file_tdms(
        waveform_name='src_wfm',
        waveform_file_path=get_test_file_path(test_name, 'source_waveform.tdms'))
    # burst the waveform to confirm that configuration and loading went okay
    pass_fail = multi_instrument_session.burst_pattern(start_label='new_pattern')
    assert pass_fail == {0: True, 1: True}
def test_create_capture_waveform_serial(multi_instrument_session):
    """Create a serial capture waveform, burst, then fetch to confirm the configuration."""
    test_name = 'test_create_capture_waveform_serial'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    num_samples = 2
    multi_instrument_session.pins['HI0'].create_capture_waveform_serial(
        waveform_name='capt_wfm',
        sample_width=2,
        bit_order=nidigital.BitOrder.LSB)
    # The pattern references a wfm 'src_wfm', so we have to load it before we can burst
    multi_instrument_session.pins['LO0'].create_source_waveform_serial(
        waveform_name='src_wfm',
        data_mapping=nidigital.SourceDataMapping.BROADCAST,
        sample_width=2,
        bit_order=nidigital.BitOrder.LSB)
    multi_instrument_session.write_source_waveform_broadcast(
        waveform_name='src_wfm',
        waveform_data=[1, 2])
    multi_instrument_session.burst_pattern(start_label='new_pattern')
    # Fetch to confirm that configuration went okay
    fetched_waveforms = multi_instrument_session.sites[1, 0].fetch_capture_waveform(
        waveform_name='capt_wfm',
        samples_to_read=num_samples)
    # One waveform per requested site, each with the requested number of samples.
    assert sorted(fetched_waveforms.keys()) == sorted([0, 1])
    assert all(len(fetched_waveforms[site]) == num_samples for site in fetched_waveforms)
def test_create_capture_waveform_from_file_digicapture(multi_instrument_session):
    """Create a capture waveform from a .digicapture file, burst, then fetch to confirm."""
    test_name = 'test_create_capture_waveform_serial'
    configure_session(multi_instrument_session, test_name)
    multi_instrument_session.load_pattern(get_test_file_path(test_name, 'pattern.digipat'))
    num_samples = 2
    multi_instrument_session.create_capture_waveform_from_file_digicapture(
        waveform_name='capt_wfm',
        waveform_file_path=get_test_file_path(test_name, 'capture_waveform.digicapture'))
    # The pattern references a wfm 'src_wfm', so we have to load it before we can burst
    multi_instrument_session.create_source_waveform_from_file_tdms(
        waveform_name='src_wfm',
        waveform_file_path=get_test_file_path(test_name, 'source_waveform.tdms'),
        write_waveform_data=True)
    multi_instrument_session.burst_pattern(start_label='new_pattern')
    # Fetch to confirm that configuration went okay
    fetched_waveforms = multi_instrument_session.sites[1, 0].fetch_capture_waveform(
        waveform_name='capt_wfm',
        samples_to_read=num_samples)
    # One waveform per requested site, each with the requested number of samples.
    assert sorted(fetched_waveforms.keys()) == sorted([0, 1])
    assert all(len(fetched_waveforms[site]) == num_samples for site in fetched_waveforms)
def test_send_software_edge_trigger(multi_instrument_session):
    """Gate a burst on a software start trigger, then fire the trigger and wait."""
    test_files_folder = 'simple_pattern'
    configure_session(multi_instrument_session, test_files_folder)
    multi_instrument_session.load_pattern(get_test_file_path(test_files_folder, 'pattern.digipat'))
    # Make the burst wait for a software start trigger instead of starting immediately.
    multi_instrument_session.start_trigger_type = nidigital.TriggerType.SOFTWARE
    multi_instrument_session.burst_pattern(start_label='new_pattern', wait_until_done=False)
    multi_instrument_session.send_software_edge_trigger(
        trigger=nidigital.SoftwareTrigger.START,
        trigger_identifier='')
    # We shouldn't time out, having sent the trigger, though in simulation it might complete, anyway
    multi_instrument_session.wait_until_done(timeout=hightime.timedelta(seconds=5.0))
def test_specifications_levels_and_timing_single(multi_instrument_session):
    '''Test methods for loading, applying and unloading specifications, levels, and timing files.
    - apply_levels_and_timing
    - load_specifications_levels_and_timing
    - unload_specifications
    '''
    pinmap = get_test_file_path('specifications_levels_and_timing_single', 'pin_map.pinmap')
    specs = get_test_file_path('specifications_levels_and_timing_single', 'specs.specs')
    # Levels and timing files contain references to variables in the specs file
    levels = get_test_file_path('specifications_levels_and_timing_single', 'levels.digilevels')
    timing = get_test_file_path('specifications_levels_and_timing_single', 'timing.digitiming')
    multi_instrument_session.load_pin_map(file_path=pinmap)
    multi_instrument_session.load_specifications_levels_and_timing(
        specifications_file_paths=specs,
        levels_file_paths=levels,
        timing_file_paths=timing)
    # Verify the loaded levels and timing sheets can be applied to hardware
    multi_instrument_session.apply_levels_and_timing(levels_sheet='levels', timing_sheet='timing')
    multi_instrument_session.unload_specifications(file_paths=specs)
    # Verify reapplying the loaded levels and timing sheets throws
    try:
        multi_instrument_session.apply_levels_and_timing(levels_sheet='levels', timing_sheet='timing')
        assert False
    except nidigital.Error as e:
        assert e.code == -1074118494
        assert e.description.find('An error occurred while getting values from a levels sheet.') != -1
def test_specifications_levels_and_timing_multiple(multi_instrument_session):
    '''Test methods for loading, applying and unloading multiple specifications, levels, and timing files.
    - apply_levels_and_timing
    - load_specifications_levels_and_timing
    - unload_specifications

    Deliberately passes the file collections as a list and as a tuple to cover
    both sequence forms of the parameters.
    '''
    pinmap = get_test_file_path('specifications_levels_and_timing_multiple', 'pin_map.pinmap')
    specs1 = get_test_file_path('specifications_levels_and_timing_multiple', 'specs1.specs')
    # Contains reference to variables in specs1
    specs2 = get_test_file_path('specifications_levels_and_timing_multiple', 'specs2.specs')
    # All levels and timing files contain references to variables in specs1 and specs2
    levels1 = get_test_file_path('specifications_levels_and_timing_multiple', 'levels1.digilevels')
    levels2 = get_test_file_path('specifications_levels_and_timing_multiple', 'levels2.digilevels')
    timing1 = get_test_file_path('specifications_levels_and_timing_multiple', 'timing1.digitiming')
    timing2 = get_test_file_path('specifications_levels_and_timing_multiple', 'timing2.digitiming')
    multi_instrument_session.load_pin_map(file_path=pinmap)
    multi_instrument_session.load_specifications_levels_and_timing(
        specifications_file_paths=[specs1, specs2], # list
        levels_file_paths=(levels1, levels2), # tuple
        timing_file_paths=[timing1, timing2])
    # Verify the loaded levels and timing sheets can be applied to hardware
    multi_instrument_session.apply_levels_and_timing(levels_sheet='levels1', timing_sheet='timing2')
    multi_instrument_session.apply_levels_and_timing(levels_sheet='levels2', timing_sheet='timing1')
    multi_instrument_session.unload_specifications(file_paths=[specs1, specs2])
    # Verify reapplying the loaded levels and timing sheets throws
    try:
        multi_instrument_session.apply_levels_and_timing(levels_sheet='levels1', timing_sheet='timing2')
        assert False
    except nidigital.Error as e:
        assert e.code == -1074118494
        assert e.description.find('An error occurred while getting values from a levels sheet.') != -1
def test_specifications_levels_and_timing_load_sequentially(multi_instrument_session):
    '''Test methods for separately loading, applying and unloading multiple specifications, levels, and timing files.
    - apply_levels_and_timing
    - load_specifications_levels_and_timing
    - unload_specifications
    '''
    pinmap = get_test_file_path('specifications_levels_and_timing_multiple', 'pin_map.pinmap')
    specs1 = get_test_file_path('specifications_levels_and_timing_multiple', 'specs1.specs')
    # Contains reference to variables in specs1
    specs2 = get_test_file_path('specifications_levels_and_timing_multiple', 'specs2.specs')
    # All levels and timing files contain references to variables in specs1 and specs2
    levels1 = get_test_file_path('specifications_levels_and_timing_multiple', 'levels1.digilevels')
    levels2 = get_test_file_path('specifications_levels_and_timing_multiple', 'levels2.digilevels')
    timing1 = get_test_file_path('specifications_levels_and_timing_multiple', 'timing1.digitiming')
    timing2 = get_test_file_path('specifications_levels_and_timing_multiple', 'timing2.digitiming')
    multi_instrument_session.load_pin_map(file_path=pinmap)
    # Load just the specs files first, in two separate calls
    multi_instrument_session.load_specifications_levels_and_timing(specifications_file_paths=specs1)
    multi_instrument_session.load_specifications_levels_and_timing(specifications_file_paths=[specs2])
    # Then load both the levels together
    multi_instrument_session.load_specifications_levels_and_timing(levels_file_paths=[levels2, levels1])
    # Then load the two timing files in two separate calls
    multi_instrument_session.load_specifications_levels_and_timing(timing_file_paths=[timing2])
    multi_instrument_session.load_specifications_levels_and_timing(timing_file_paths=[timing1])
    # Verify the loaded levels and timing sheets can be applied to hardware
    multi_instrument_session.apply_levels_and_timing(levels_sheet='levels1', timing_sheet='timing2')
    multi_instrument_session.apply_levels_and_timing(levels_sheet='levels2', timing_sheet='timing1')
    # Unload with a bare string and with a one-element tuple to cover both forms.
    multi_instrument_session.unload_specifications(file_paths=specs1)
    # FIX: was '(specs2)', which is just a parenthesized string; a one-element
    # tuple was intended to exercise the sequence form of file_paths.
    multi_instrument_session.unload_specifications(file_paths=(specs2,))
    # Verify reapplying the loaded levels and timing sheets throws
    try:
        multi_instrument_session.apply_levels_and_timing(levels_sheet='levels1', timing_sheet='timing2')
        assert False
    except nidigital.Error as e:
        assert e.code == -1074118494
        assert e.description.find('An error occurred while getting values from a levels sheet.') != -1
def test_apply_levels_and_timing_initial_states(multi_instrument_session):
    """apply_levels_and_timing accepts initial-state pins as a list or a comma-separated string."""
    configure_session(multi_instrument_session, 'simple_pattern')
    multi_instrument_session.sites[0, 2].apply_levels_and_timing(
        levels_sheet='pin_levels',
        timing_sheet='timing',
        initial_state_high_pins=['HI0', 'LowPins'],  # list form
        initial_state_tristate_pins='HI1, HI2')  # comma-separated string form
|
from userInfoapp.models import UserInfo
from django.contrib import admin
@admin.register(UserInfo)
class UserInfoAdmin(admin.ModelAdmin):
    """Admin changelist configuration for UserInfo records."""

    # Columns shown on the changelist page.
    list_display = ["id", "user", "user_visit", "user_fdate", "user_ldate"]
|
/*
* Copyright (C) 2014
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*
* ====================================================
* __ __ __ _____ __ __
* / / / | / / / ___/ / | / / SEZIONE di BARI
* / / / | |/ / / /_ / | |/ /
* / / / /| / / / __/ / /| / /
* /_/ /_/ |__/ /_/ /_/ |__/
*
* ====================================================
* Written by Giuseppe De Robertis <Giuseppe.DeRobertis@ba.infn.it>, 2014.
*
*/
#ifndef IPBUS_H
#define IPBUS_H
#include "wishbonebus.h"
#include "mdictionary.h"
#include <mutex>
#include <stdint.h>
#include <string>
// Bookkeeping record for one queued IPbus transaction: the header fields that
// were sent plus the destination buffer for any read reply.
class IPbusTransaction {
public:
    uint8_t version;        // IPbus protocol version field of the header
    uint16_t words;         // number of data words in the transaction
    uint8_t typeId;         // transaction type identifier (read/write/RMW/...)
    uint8_t transactionId;  // id used to match a reply to its request
    uint8_t infoCode;       // status code reported by the target
    uint32_t *readDataPtr;  // where reply data for this transaction is stored (may be unused for writes)
};
// IPbus packet builder/parser layered on a generic Wishbone bus interface.
// The add*() methods queue transactions into txBuffer; a concrete subclass
// implements execute() to perform the actual transport, after which
// processAnswer() decodes the reply and fills the per-transaction read
// pointers. Access is serialized via the recursive mutex.
class IPbus : public WishboneBus
{
public:
    IPbus();
    virtual ~IPbus();
    void addIdle();                                        // queue a no-op transaction
    void addWrite(uint32_t address, uint32_t data);        // single-word write
    void addWrite(int size, uint32_t address, uint32_t *data);   // block write, incrementing address
    void addNIWrite(int size, uint32_t address, uint32_t *data); // block write, non-incrementing (FIFO) address
    void addRead(int size, uint32_t address, uint32_t *data);    // block read, incrementing address
    void addRead(uint32_t address, uint32_t *data) { addRead(1, address, data); }
    void addNIRead(int size, uint32_t address, uint32_t *data);  // block read, non-incrementing (FIFO) address
    // Read-modify-write: bit masking and summation primitives; *rData (if
    // given) receives the value read back by the target.
    void addRMWbits(uint32_t address, uint32_t mask, uint32_t data, uint32_t *rData = NULL);
    void addRMWsum(uint32_t address, uint32_t data, uint32_t *rData = NULL);
    virtual void execute() = 0;                            // transport-specific send/receive
    int getBufferSize() const { return bufferSize; }
    virtual const std::string name() = 0;

    // test functions — deliberately produce malformed traffic for error-path tests
    void addBadIdle(bool sendWrongVersion = false, bool sendWrongInfoCode = false);
    void cutTX(int size) { txSize -= size; }               // truncate the pending TX packet

protected:
    bool duplicatedRxPkt();                                // detect a re-delivered reply packet
    void processAnswer();                                  // decode rxBuffer into the queued transactions
    int getExpectedRxSize() { return expectedRxSize; }

private:
    void chkBuffers(int txTransactionSize, int rxTransactionSize); // ensure room for one more transaction
    void addWord(uint32_t w);
    uint32_t getWord();
    void addHeader(uint16_t words, uint8_t typeId, uint32_t *readDataPtr);
    void getHeader(IPbusTransaction *tr);
    void clearList();
    void dumpRxData();                                     // debug helper

protected:
    uint8_t* txBuffer;            // outgoing packet under construction
    uint8_t* rxBuffer;            // last received reply packet
    int txSize;                   // bytes queued in txBuffer
    int rxSize;                   // bytes received in rxBuffer
    int errorCode;
    std::recursive_mutex mutex;   // guards buffer construction across threads

private:
    static const int bufferSize;
    IPbusTransaction* transactionList;  // transactions queued since the last execute()
    int numTransactions;
    uint8_t transactionId;              // running id assigned to new transactions
    int expectedRxSize;
    int rxPtr;                          // read cursor into rxBuffer while decoding
    int lastRxPktId;                    // used by duplicatedRxPkt()
};
#endif // IPBUS_H
|
from . import rollout
|
#!/usr/bin/env python3
import os
import subprocess
import sys
def y_n(q):
    """Prompt *q* until the user answers yes or no; return the answer as a bool."""
    while True:
        answer = input('{} (y/n): '.format(q)).lower()
        if answer in ('yes', 'y'):
            return True
        if answer in ('no', 'n'):
            return False
        # Any other input falls through and re-prompts.
def update_deps():
    """Upgrade the bot's Python dependencies with pip for the current interpreter.

    Raises OSError with a manual-recovery hint if pip fails.
    """
    print("Attempting to update dependencies...")
    pip_command = '"{}" -m pip install --no-warn-script-location --user -U -r requirements.txt'.format(sys.executable)
    try:
        subprocess.check_call(pip_command, shell=True)
    except subprocess.CalledProcessError:
        raise OSError("Could not update dependencies. You will need to run '\"{0}\" -m pip install -U -r requirements.txt' yourself.".format(sys.executable))
def finalize():
    """Report the installed MusicBot version, tolerating a broken installation."""
    try:
        from musicbot.constants import VERSION
    except Exception:
        # Import failure implies the install did not complete; don't crash the updater.
        print('There was a problem fetching your current bot version. The installation may not have completed correctly.')
    else:
        print('The current MusicBot version is {0}.'.format(VERSION))
    print("Done!")
def main():
    """Update the bot checkout via git, then refresh its pip dependencies.

    Flow: verify this is a usable git checkout, optionally hard-reset local
    modifications (with user confirmation), `git pull`, then update deps.
    Raises EnvironmentError/OSError with manual-recovery instructions on failure.
    """
    print('Starting...')
    # Make sure that we're in a Git repository
    if not os.path.isdir('.git'):
        raise EnvironmentError("This isn't a Git repository.")
    # Make sure that we can actually use Git on the command line
    # because some people install Git Bash without allowing access to Windows CMD
    try:
        subprocess.check_call('git --version', shell=True, stdout=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        raise EnvironmentError("Couldn't use Git on the CLI. You will need to run 'git pull' yourself.")
    print("Passed Git checks...")
    # Check that the current working directory is clean
    # (`--porcelain` prints nothing when there are no local changes)
    sp = subprocess.check_output('git status --porcelain', shell=True, universal_newlines=True)
    if sp:
        oshit = y_n('You have modified files that are tracked by Git (e.g the bot\'s source files).\n'
                    'Should we try resetting the repo? You will lose local modifications.')
        if oshit:
            try:
                subprocess.check_call('git reset --hard', shell=True)
            except subprocess.CalledProcessError:
                raise OSError("Could not reset the directory to a clean state.")
        else:
            # User declined the reset: skip the git update entirely and
            # optionally just refresh dependencies before returning.
            wowee = y_n('OK, skipping bot update. Do you still want to update dependencies?')
            if wowee:
                update_deps()
            else:
                finalize()
            return
    print("Checking if we need to update the bot...")
    try:
        subprocess.check_call('git pull', shell=True)
    except subprocess.CalledProcessError:
        raise OSError("Could not update the bot. You will need to run 'git pull' yourself.")
    update_deps()
    finalize()
if __name__ == '__main__':
main()
|
#! /usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import logging
from barf import BARF
from barf.core.reil import ReilMnemonic
logger = logging.getLogger(__name__)
def check_path_satisfiability(code_analyzer, path, start_address):
    """Check satisfiability of a basic block path.

    Walks consecutive basic-block pairs along *path*, translating each REIL
    instruction into SMT assertions on *code_analyzer*, starting only once the
    instruction at *start_address* has been seen. For each block-ending JCC it
    constrains the branch condition to the value that leads to the next block
    in the path, then queries the solver.

    :param code_analyzer: BARF code analyzer accumulating SMT constraints
        (mutated: constraints/instructions are added as a side effect).
    :param path: sequence of basic blocks forming the candidate path.
    :param start_address: address of the first instruction to translate.
    :return: True if every checked prefix of the path is satisfiable.
    """
    start_instr_found = False
    sat = False
    # Traverse basic blocks, translate its instructions to SMT
    # expressions and add them as assertions.
    for bb_curr, bb_next in zip(path[:-1], path[1:]):
        logger.info("BB @ {:#x}".format(bb_curr.address))
        # For each instruction...
        for instr in bb_curr:
            # If the start instruction have not been found, keep
            # looking...
            if not start_instr_found:
                if instr.address == start_address:
                    start_instr_found = True
                else:
                    continue
            logger.info("{:#x} {}".format(instr.address, instr))
            # For each REIL instruction...
            for reil_instr in instr.ir_instrs:
                # REIL addresses pack (native address << 8 | index).
                logger.info("{:#x} {:02d} {}".format(reil_instr.address >> 0x8, reil_instr.address & 0xff,
                                                     reil_instr))
                if reil_instr.mnemonic == ReilMnemonic.JCC:
                    # Check that the JCC is the last instruction of
                    # the basic block (skip CALL instructions.)
                    if instr.address + instr.size - 1 != bb_curr.end_address:
                        logger.error("Unexpected JCC instruction: {:#x} {} ({})".format(instr.address,
                                                                                        instr,
                                                                                        reil_instr))
                        # raise Exception()
                        continue
                    # Make sure branch target address from current
                    # basic block is the start address of the next.
                    assert(bb_curr.taken_branch == bb_next.address or
                           bb_curr.not_taken_branch == bb_next.address or
                           bb_curr.direct_branch == bb_next.address)
                    # Set branch condition accordingly.
                    if bb_curr.taken_branch == bb_next.address:
                        branch_var_goal = 0x1
                    elif bb_curr.not_taken_branch == bb_next.address:
                        branch_var_goal = 0x0
                    else:
                        # Direct (unconditional) branch: nothing to constrain.
                        continue
                    # Add branch condition goal constraint.
                    code_analyzer.add_constraint(code_analyzer.get_operand_expr(reil_instr.operands[0]) == branch_var_goal)
                    # The JCC instruction was the last within the
                    # current basic block. End this iteration and
                    # start next one.
                    break
                # Translate and add SMT expressions to the solver.
                code_analyzer.add_instruction(reil_instr)
        # Query the solver after each block; bail out early once unsat.
        sat = code_analyzer.check() == 'sat'
        logger.info("BB @ {:#x} sat? {}".format(bb_curr.address, sat))
        if not sat:
            break
    # Return satisfiability.
    return sat
if __name__ == "__main__":
    #
    # Open file
    #
    barf = BARF("./samples/bin/constraint3.x86")
    #
    # Check constraint
    #
    # Disassembly of the sample's main(): returns 0 only when the three
    # stack "cookies" hold the expected magic values.
    # 80483ed:       55                      push   ebp
    # 80483ee:       89 e5                   mov    ebp,esp
    # 80483f0:       83 ec 10                sub    esp,0x10
    # 80483f3:       c7 45 f0 01 00 00 00    mov    DWORD PTR [ebp-0x10],0x1
    # 80483fa:       81 7d f4 44 43 42 41    cmp    DWORD PTR [ebp-0xc],0x41424344
    # 8048401:       75 19                   jne    804841c <main+0x2f>
    # 8048403:       81 7d f8 48 47 46 45    cmp    DWORD PTR [ebp-0x8],0x45464748
    # 804840a:       75 10                   jne    804841c <main+0x2f>
    # 804840c:       81 7d fc ef cd ab 00    cmp    DWORD PTR [ebp-0x4],0xabcdef
    # 8048413:       75 07                   jne    804841c <main+0x2f>
    # 8048415:       c7 45 f0 00 00 00 00    mov    DWORD PTR [ebp-0x10],0x0
    # 804841c:       8b 45 f0                mov    eax,DWORD PTR [ebp-0x10]
    # 804841f:       c9                      leave
    # 8048420:       c3                      ret
    start_addr = 0x80483ed
    end_addr = 0x8048420
    print("[+] Recovering function CFG...")
    cfg = barf.recover_cfg(start_addr, end_addr)
    print("[+] Checking path satisfiability...")
    # Preconditions: set stack
    # Note: this isn't strictly necessary but it helps reduce the time it
    # takes the solver find a solution.
    esp = barf.code_analyzer.get_register_expr("esp", mode="pre")
    barf.code_analyzer.add_constraint(esp == 0xffffceec)
    # Traverse paths and check satisfiability
    for bb_path in cfg.all_simple_bb_paths(start_addr, end_addr):
        print("[+] Path: {0}".format(" -> ".join([hex(bb.address) for bb in bb_path])))
        if check_path_satisfiability(barf.code_analyzer, list(bb_path), start_addr):
            print("[+] Satisfiable! Possible assignments:")
            # Read back a model: the return value and the three cookies from
            # the post-state memory at their stack slots.
            ebp = barf.code_analyzer.get_register_expr("ebp", mode="post")
            rv = barf.code_analyzer.get_memory_expr(ebp-0x10, 4, mode="post")
            cookie1 = barf.code_analyzer.get_memory_expr(ebp-0xc, 4, mode="post")
            cookie2 = barf.code_analyzer.get_memory_expr(ebp-0x8, 4, mode="post")
            cookie3 = barf.code_analyzer.get_memory_expr(ebp-0x4, 4, mode="post")
            rv_val = barf.code_analyzer.get_expr_value(rv)
            cookie1_val = barf.code_analyzer.get_expr_value(cookie1)
            cookie2_val = barf.code_analyzer.get_expr_value(cookie2)
            cookie3_val = barf.code_analyzer.get_expr_value(cookie3)
            print("- cookie1: 0x{0:08x} ({0})".format(cookie1_val))
            print("- cookie2: 0x{0:08x} ({0})".format(cookie2_val))
            print("- cookie3: 0x{0:08x} ({0})".format(cookie3_val))
            print("- rv: 0x{0:08x} ({0})".format(rv_val))
        else:
            print("[-] Unsatisfiable!")
|
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_EULERANGLES_H
#define EIGEN_EULERANGLES_H
namespace Eigen {
/** \geometry_module \ingroup Geometry_Module
*
*
* \returns the Euler-angles of the rotation matrix \c *this using the convention defined by the triplet (\a a0,\a a1,\a a2)
*
* Each of the three parameters \a a0,\a a1,\a a2 represents the respective rotation axis as an integer in {0,1,2}.
* For instance, in:
* \code Vector3f ea = mat.eulerAngles(2, 0, 2); \endcode
* "2" represents the z axis and "0" the x axis, etc. The returned angles are such that
* we have the following equality:
* \code
* mat == AngleAxisf(ea[0], Vector3f::UnitZ())
* * AngleAxisf(ea[1], Vector3f::UnitX())
* * AngleAxisf(ea[2], Vector3f::UnitZ()); \endcode
* This corresponds to the right-multiply conventions (with right hand side frames).
*
* The returned angles are in the ranges [0:pi]x[-pi:pi]x[-pi:pi].
*
* \sa class AngleAxis
*/
template<typename Derived>
inline Matrix<typename MatrixBase<Derived>::Scalar,3,1>
MatrixBase<Derived>::eulerAngles(Index a0, Index a1, Index a2) const
{
  using std::atan2;
  using std::sin;
  using std::cos;
  /* Implemented from Graphics Gems IV */
  EIGEN_STATIC_ASSERT_MATRIX_SPECIFIC_SIZE(Derived,3,3)
  Matrix<Scalar,3,1> res;
  typedef Matrix<typename Derived::Scalar,2,1> Vector2;
  // odd == 0 when the axis triplet follows the cyclic order (0,1,2 etc.),
  // 1 otherwise; it controls sign conventions below.
  const Index odd = ((a0+1)%3 == a1) ? 0 : 1;
  const Index i = a0;
  const Index j = (a0 + 1 + odd)%3;
  const Index k = (a0 + 2 - odd)%3;
  // Proper Euler angles (first and last axis identical, e.g. Z-X-Z)...
  if (a0==a2)
  {
    res[0] = atan2(coeff(j,i), coeff(k,i));
    // Fold the first angle into the documented range, flipping the sign of
    // the middle angle to keep the same rotation.
    if((odd && res[0]<Scalar(0)) || ((!odd) && res[0]>Scalar(0)))
    {
      // NOTE(review): M_PI is POSIX, not guaranteed by the C++ standard —
      // presumably provided here by the platform/Eigen config; confirm.
      res[0] = (res[0] > Scalar(0)) ? res[0] - Scalar(M_PI) : res[0] + Scalar(M_PI);
      Scalar s2 = Vector2(coeff(j,i), coeff(k,i)).norm();
      res[1] = -atan2(s2, coeff(i,i));
    }
    else
    {
      Scalar s2 = Vector2(coeff(j,i), coeff(k,i)).norm();
      res[1] = atan2(s2, coeff(i,i));
    }
    // With a=(0,1,0), we have i=0; j=1; k=2, and after computing the first two angles,
    // we can compute their respective rotation, and apply its inverse to M. Since the result must
    // be a rotation around x, we have:
    //
    //  c2  s1.s2 c1.s2                   1  0   0
    //  0   c1    -s1       *    M    =   0  c3  s3
    //  -s2 s1.c2 c1.c2                   0 -s3  c3
    //
    //  Thus:  m11.c1 - m21.s1 = c3  &   m12.c1 - m22.s1 = s3
    Scalar s1 = sin(res[0]);
    Scalar c1 = cos(res[0]);
    res[2] = atan2(c1*coeff(j,k)-s1*coeff(k,k), c1*coeff(j,j) - s1 * coeff(k,j));
  }
  // ...Tait-Bryan angles (three distinct axes, e.g. Z-Y-X).
  else
  {
    res[0] = atan2(coeff(j,k), coeff(k,k));
    Scalar c2 = Vector2(coeff(i,i), coeff(i,j)).norm();
    if((odd && res[0]<Scalar(0)) || ((!odd) && res[0]>Scalar(0))) {
      res[0] = (res[0] > Scalar(0)) ? res[0] - Scalar(M_PI) : res[0] + Scalar(M_PI);
      res[1] = atan2(-coeff(i,k), -c2);
    }
    else
      res[1] = atan2(-coeff(i,k), c2);
    Scalar s1 = sin(res[0]);
    Scalar c1 = cos(res[0]);
    res[2] = atan2(s1*coeff(k,i)-c1*coeff(j,i), c1*coeff(j,j) - s1 * coeff(k,j));
  }
  // Non-cyclic orderings negate all three angles to keep the same rotation.
  if (!odd)
    res = -res;
  return res;
}
} // end namespace Eigen
#endif // EIGEN_EULERANGLES_H
|
//PRUEBA//
// Mercado Pago SDK. Note: the original file declared `const mercadopago`
// twice, which is a SyntaxError ("Identifier 'mercadopago' has already been
// declared") — require/configure must appear exactly once.
const mercadopago = require('mercadopago');

// Set credentials.
// TODO(review): load the token from an environment variable instead of
// hard-coding a production secret in source.
mercadopago.configure({
    access_token: 'PROD_ACCESS_TOKEN'
});

// Build a checkout preference object.
let preference = {
    items: [
        {
            "sys": { "id": "1" },
            "fields": {
                "title": "Vainilla Brew",
                "price": 1100,
                "image": { "fields": { "file": { "url": "img/productModel.png" } } }
            }
        }
    ]
};

mercadopago.preferences.create(preference)
    .then(function (response) {
        // This value replaces the "<%= global.id %>" placeholder in the HTML.
        global.id = response.body.id;
    }).catch(function (error) {
        console.log(error);
    });
// payment button: <script src="https://www.mercadopago.com.ar/integrations/v1/web-payment-checkout.js" data-preference-id='<%= global.id %>'></script>
|
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: drop obsolete one_fm_* columns from the Job Applicant table.

    Only drops columns that still exist, so re-running the patch is safe.
    """
    columns = ("one_fm_applicant_civil_id", "one_fm_passport_applicant_number", "one_fm_previous_company_authorized_signatory", "one_fm_recruiter")
    # Fetch the table schema once instead of re-querying it on every iteration.
    existing_columns = frappe.db.get_table_columns("Job Applicant")
    for column in columns:
        if column in existing_columns:
            # Column names come only from the fixed tuple above, so the string
            # formatting here cannot inject arbitrary SQL.
            frappe.db.sql("alter table `tabJob Applicant` drop column {0}".format(column))
|
class SSHConfig(object):
    """A SSH configuration: the parameters needed to reach one SSH host."""

    def __init__(self, hostname, username, port, identityfile):
        """Create a new object

        :param hostname: The hostname of the SSH server
        :param username: The username to use for login
        :param port: The port where the SSH server is listening for connections
        :param identityfile: The key to use for login
        """
        self.hostname = hostname
        self.username = username
        self.port = port
        self.identityfile = identityfile

    def __repr__(self):
        # Debug-friendly representation; does not affect existing callers.
        return "{}(hostname={!r}, username={!r}, port={!r}, identityfile={!r})".format(
            type(self).__name__, self.hostname, self.username, self.port, self.identityfile)
|
"""
Utils and wrappers for scoring parsers.
"""
from classla.models.common.utils import ud_scores
def score(system_conllu_file, gold_conllu_file, verbose=True):
    """ Wrapper for UD parser scorer. """
    evaluation = ud_scores(gold_conllu_file, system_conllu_file)
    las = evaluation['LAS']
    if verbose:
        # Print the headline F1 metrics as percentages, tab-separated.
        scores = [evaluation[metric].f1 * 100 for metric in ['LAS', 'MLAS', 'BLEX']]
        print("LAS\tMLAS\tBLEX")
        print("{:.2f}\t{:.2f}\t{:.2f}".format(*scores))
    return las.precision, las.recall, las.f1
|
from collections import OrderedDict
import numpy as np
import torch
import torch.optim as optim
from torch import nn as nn
import rlkit.torch.pytorch_util as ptu
from rlkit.core.eval_util import create_stats_ordered_dict
from rlkit.torch.torch_rl_algorithm import TorchTrainer
class DQNTrainer(TorchTrainer):
    """DQN trainer: regresses ``qf`` toward one-step TD targets computed
    from a separate ``target_qf`` that is soft-updated toward ``qf``."""

    def __init__(
            self,
            qf,
            target_qf,
            learning_rate=1e-3,
            soft_target_tau=1e-3,
            target_update_period=1,
            qf_criterion=None,
            discount=0.99,
            reward_scale=1.0,
    ):
        """
        :param qf: Q-network being trained.
        :param target_qf: lagged copy of ``qf`` used to compute TD targets.
        :param learning_rate: Adam learning rate for ``qf``.
        :param soft_target_tau: Polyak averaging coefficient for target updates.
        :param target_update_period: soft-update every this many train steps.
        :param qf_criterion: regression loss; defaults to MSE.
        :param discount: TD discount factor gamma.
        :param reward_scale: multiplier applied to raw rewards.
        """
        super().__init__()
        self.qf = qf
        self.target_qf = target_qf
        self.learning_rate = learning_rate
        self.soft_target_tau = soft_target_tau
        self.target_update_period = target_update_period
        self.qf_optimizer = optim.Adam(
            self.qf.parameters(),
            lr=self.learning_rate,
        )
        self.discount = discount
        self.reward_scale = reward_scale
        self.qf_criterion = qf_criterion or nn.MSELoss()
        self.eval_statistics = OrderedDict()
        self._n_train_steps_total = 0
        self._need_to_update_eval_statistics = True

    def train_from_torch(self, batch):
        """Run one DQN gradient step on a batch of transitions.

        ``batch`` is a dict of torch tensors: rewards, terminals,
        observations, actions (one-hot), next_observations.
        """
        rewards = batch['rewards'] * self.reward_scale
        terminals = batch['terminals']
        obs = batch['observations']
        actions = batch['actions']
        next_obs = batch['next_observations']

        """
        Compute loss
        """
        # Bellman target: r + gamma * max_a' Q_target(s', a'); detached so no
        # gradient flows through the target network.
        target_q_values = self.target_qf(next_obs).detach().max(1, keepdim=True)[0]
        y_target = rewards + (1. - terminals) * self.discount * target_q_values
        y_target = y_target.detach()
        # actions is a one-hot vector
        y_pred = torch.sum(self.qf(obs) * actions, dim=1, keepdim=True)
        qf_loss = self.qf_criterion(y_pred, y_target)

        """
        Soft target network updates
        """
        self.qf_optimizer.zero_grad()
        qf_loss.backward()
        self.qf_optimizer.step()

        """
        Soft Updates
        """
        # Polyak-average qf into target_qf every target_update_period steps.
        if self._n_train_steps_total % self.target_update_period == 0:
            ptu.soft_update_from_to(self.qf, self.target_qf, self.soft_target_tau)

        """
        Save some statistics for eval using just one batch.
        """
        if self._need_to_update_eval_statistics:
            self._need_to_update_eval_statistics = False
            self.eval_statistics['QF Loss'] = np.mean(ptu.get_numpy(qf_loss))
            self.eval_statistics.update(
                create_stats_ordered_dict(
                    'Y Predictions',
                    ptu.get_numpy(y_pred),
                ))

    def get_diagnostics(self):
        # Statistics captured from the first batch of the current epoch.
        return self.eval_statistics

    def end_epoch(self, epoch):
        # Re-arm statistics collection for the next epoch's first batch.
        self._need_to_update_eval_statistics = True

    @property
    def networks(self):
        # Networks that the outer algorithm moves between devices / train modes.
        return [
            self.qf,
            self.target_qf,
        ]

    def get_snapshot(self):
        # Objects persisted by the checkpointing logic.
        return dict(
            qf=self.qf,
            target_qf=self.target_qf,
        )
|
$(document).ready(function () {
    // Turn every <a data-post="..."> into a POST submission of its href,
    // optionally guarded by a confirm() dialog with the data-post message.
    $("a[data-post]").click(function (e) {
        e.preventDefault();
        var link = $(this);
        var confirmMessage = link.data("post");
        if (confirmMessage && !confirm(confirmMessage)) {
            return;
        }
        var form = $("<form>")
            .attr("method", "post")
            .attr("action", link.attr("href"));
        form.appendTo(document.body);
        form.submit();
    });
});
|
import {
MOSTRAR_ALERTA,
OCULTAR_ALERTA
} from '../types';
// Show an alert
export function mostrarAlerta(alerta) {
    // Thunk: dispatch the action that stores the alert in state.
    // (fixes the misspelled parameter name "distpach" for consistency
    // with ocultarAlertaAction below)
    return (dispatch) => {
        dispatch(crearAlerta(alerta));
    };
}

// Action creator: MOSTRAR_ALERTA with the alert as payload.
const crearAlerta = alerta => ({
    type: MOSTRAR_ALERTA,
    payload: alerta
});

// Hide the alert
export function ocultarAlertaAction() {
    return (dispatch) => {
        dispatch(ocultarAlerta());
    };
}

// Action creator: OCULTAR_ALERTA carries no payload.
const ocultarAlerta = () => ({
    type: OCULTAR_ALERTA
});
|
/**
* ueditor完整配置项
* 可以在这里配置整个编辑器的特性
*/
/**************************提示********************************
* 所有被注释的配置项均为UEditor默认值。
* 修改默认配置请首先确保已经完全明确该参数的真实用途。
* 主要有两种修改方案,一种是取消此处注释,然后修改成对应参数;另一种是在实例化编辑器时传入对应参数。
* 当升级编辑器时,可直接使用旧版配置文件替换新版配置文件,不用担心旧版配置文件中因缺少新功能所需的参数而导致脚本报错。
**************************提示********************************/
(function () {
/**
* 编辑器资源文件根路径。它所表示的含义是:以编辑器实例化页面为当前路径,指向编辑器资源文件(即dialog等文件夹)的路径。
* 鉴于很多同学在使用编辑器的时候出现的种种路径问题,此处强烈建议大家使用"相对于网站根目录的相对路径"进行配置。
* "相对于网站根目录的相对路径"也就是以斜杠开头的形如"/myProject/ueditor/"这样的路径。
* 如果站点中有多个不在同一层级的页面需要实例化编辑器,且引用了同一UEditor的时候,此处的URL可能不适用于每个页面的编辑器。
* 因此,UEditor提供了针对不同页面的编辑器可单独配置的根路径,具体来说,在需要实例化编辑器的页面最顶部写上如下代码即可。当然,需要令此处的URL等于对应的配置。
* window.UEDITOR_HOME_URL = "/xxxx/xxxx/";
*/
var URL = window.UEDITOR_HOME_URL || getUEBasePath();
/**
* 配置项主体。注意,此处所有涉及到路径的配置别遗漏URL变量。
*/
window.UEDITOR_CONFIG = {
//为编辑器实例添加一个路径,这个不能被注释
UEDITOR_HOME_URL: URL
// 服务器统一请求接口路径
,
serverUrl: URL + "jsp/controller.jsp"
//工具栏上的所有的功能按钮和下拉框,可以在new编辑器的实例时选择自己需要的重新定义
,
toolbars: [
[
'fullscreen', 'source', '|', 'undo', 'redo', '|',
'bold', 'italic', 'underline', 'fontborder', 'strikethrough', 'superscript', 'subscript', 'removeformat', 'formatmatch', 'autotypeset', 'blockquote', 'pasteplain', '|', 'forecolor', 'backcolor', 'insertorderedlist', 'insertunorderedlist', 'selectall', 'cleardoc', '|',
'rowspacingtop', 'rowspacingbottom', 'lineheight', '|',
'customstyle', 'paragraph', 'fontfamily', 'fontsize', '|',
'directionalityltr', 'directionalityrtl', 'indent', '|',
'justifyleft', 'justifycenter', 'justifyright', 'justifyjustify', '|', 'touppercase', 'tolowercase', '|',
'link', 'unlink', 'anchor', '|', 'imagenone', 'imageleft', 'imageright', 'imagecenter', '|',
'simpleupload', 'insertimage', 'emotion', 'scrawl', 'insertvideo', 'music', 'attachment', 'map', 'gmap', 'insertframe', 'insertcode', 'webapp', 'pagebreak', 'template', 'background', '|',
'horizontal', 'date', 'time', 'spechars', 'snapscreen', 'wordimage', '|',
'inserttable', 'deletetable', 'insertparagraphbeforetable', 'insertrow', 'deleterow', 'insertcol', 'deletecol', 'mergecells', 'mergeright', 'mergedown', 'splittocells', 'splittorows', 'splittocols', 'charts', '|',
'print', 'preview', 'searchreplace', 'drafts', 'help'
]
]
//当鼠标放在工具栏上时显示的tooltip提示,留空支持自动多语言配置,否则以配置值为准
//,labelMap:{
// 'anchor':'', 'undo':''
//}
//语言配置项,默认是zh-cn。有需要的话也可以使用如下这样的方式来自动多语言切换,当然,前提条件是lang文件夹下存在对应的语言文件:
//lang值也可以通过自动获取 (navigator.language||navigator.browserLanguage ||navigator.userLanguage).toLowerCase()
//,lang:"zh-cn"
//,langPath:URL +"lang/"
//主题配置项,默认是default。有需要的话也可以使用如下这样的方式来自动多主题切换,当然,前提条件是themes文件夹下存在对应的主题文件:
//现有如下皮肤:default
//,theme:'default'
//,themePath:URL +"themes/"
//,zIndex : 900 //编辑器层级的基数,默认是900
//针对getAllHtml方法,会在对应的head标签中增加该编码设置。
//,charset:"utf-8"
//若实例化编辑器的页面手动修改的domain,此处需要设置为true
//,customDomain:false
//常用配置项目
//,isShow : true //默认显示编辑器
//,textarea:'editorValue' // 提交表单时,服务器获取编辑器提交内容的所用的参数,多实例时可以给容器name属性,会将name给定的值最为每个实例的键值,不用每次实例化的时候都设置这个值
//,initialContent:'欢迎使用ueditor!' //初始化编辑器的内容,也可以通过textarea/script给值,看官网例子
//,autoClearinitialContent:true //是否自动清除编辑器初始内容,注意:如果focus属性设置为true,这个也为真,那么编辑器一上来就会触发导致初始化的内容看不到了
//,focus:false //初始化时,是否让编辑器获得焦点true或false
//如果自定义,最好给p标签如下的行高,要不输入中文时,会有跳动感
//,initialStyle:'p{line-height:1em}'//编辑器层级的基数,可以用来改变字体等
//,iframeCssUrl: URL + '/themes/iframe.css' //给编辑区域的iframe引入一个css文件
//indentValue
//首行缩进距离,默认是2em
//,indentValue:'2em'
//,initialFrameWidth:1000 //初始化编辑器宽度,默认1000
//,initialFrameHeight:320 //初始化编辑器高度,默认320
//,readonly : false //编辑器初始化结束后,编辑区域是否是只读的,默认是false
//,autoClearEmptyNode : true //getContent时,是否删除空的inlineElement节点(包括嵌套的情况)
//启用自动保存
//,enableAutoSave: true
//自动保存间隔时间, 单位ms
//,saveInterval: 500
//,fullscreen : false //是否开启初始化时即全屏,默认关闭
//,imagePopup:true //图片操作的浮层开关,默认打开
//,autoSyncData:true //自动同步编辑器要提交的数据
//,emotionLocalization:false //是否开启表情本地化,默认关闭。若要开启请确保emotion文件夹下包含官网提供的images表情文件夹
//粘贴只保留标签,去除标签所有属性
//,retainOnlyLabelPasted: false
//,pasteplain:false //是否默认为纯文本粘贴。false为不使用纯文本粘贴,true为使用纯文本粘贴
//纯文本粘贴模式下的过滤规则
//'filterTxtRules' : function(){
// function transP(node){
// node.tagName = 'p';
// node.setStyle();
// }
// return {
// //直接删除及其字节点内容
// '-' : 'script style object iframe embed input select',
// 'p': {$:{}},
// 'br':{$:{}},
// 'div':{'$':{}},
// 'li':{'$':{}},
// 'caption':transP,
// 'th':transP,
// 'tr':transP,
// 'h1':transP,'h2':transP,'h3':transP,'h4':transP,'h5':transP,'h6':transP,
// 'td':function(node){
// //没有内容的td直接删掉
// var txt = !!node.innerText();
// if(txt){
// node.parentNode.insertAfter(UE.uNode.createText(' '),node);
// }
// node.parentNode.removeChild(node,node.innerText())
// }
// }
//}()
//,allHtmlEnabled:false //提交到后台的数据是否包含整个html字符串
//insertorderedlist
//有序列表的下拉配置,值留空时支持多语言自动识别,若配置值,则以此值为准
//,'insertorderedlist':{
// //自定的样式
// 'num':'1,2,3...',
// 'num1':'1),2),3)...',
// 'num2':'(1),(2),(3)...',
// 'cn':'一,二,三....',
// 'cn1':'一),二),三)....',
// 'cn2':'(一),(二),(三)....',
// //系统自带
// 'decimal' : '' , //'1,2,3...'
// 'lower-alpha' : '' , // 'a,b,c...'
// 'lower-roman' : '' , //'i,ii,iii...'
    //         'upper-alpha' : '' , //'A,B,C'
// 'upper-roman' : '' //'I,II,III...'
//}
//insertunorderedlist
//无序列表的下拉配置,值留空时支持多语言自动识别,若配置值,则以此值为准
//,insertunorderedlist : { //自定的样式
// 'dash' :'— 破折号', //-破折号
// 'dot':' 。 小圆圈', //系统自带
// 'circle' : '', // '○ 小圆圈'
// 'disc' : '', // '● 小圆点'
// 'square' : '' //'■ 小方块'
//}
//,listDefaultPaddingLeft : '30'//默认的左边缩进的基数倍
//,listiconpath : 'http://bs.baidu.com/listicon/'//自定义标号的路径
//,maxListLevel : 3 //限制可以tab的级数, 设置-1为不限制
//,autoTransWordToList:false //禁止word中粘贴进来的列表自动变成列表标签
//fontfamily
//字体设置 label留空支持多语言自动切换,若配置,则以配置值为准
//,'fontfamily':[
// { label:'',name:'songti',val:'宋体,SimSun'},
// { label:'',name:'kaiti',val:'楷体,楷体_GB2312, SimKai'},
// { label:'',name:'yahei',val:'微软雅黑,Microsoft YaHei'},
// { label:'',name:'heiti',val:'黑体, SimHei'},
// { label:'',name:'lishu',val:'隶书, SimLi'},
// { label:'',name:'andaleMono',val:'andale mono'},
// { label:'',name:'arial',val:'arial, helvetica,sans-serif'},
// { label:'',name:'arialBlack',val:'arial black,avant garde'},
// { label:'',name:'comicSansMs',val:'comic sans ms'},
// { label:'',name:'impact',val:'impact,chicago'},
// { label:'',name:'timesNewRoman',val:'times new roman'}
//]
//fontsize
//字号
//,'fontsize':[10, 11, 12, 14, 16, 18, 20, 24, 36]
//paragraph
//段落格式 值留空时支持多语言自动识别,若配置,则以配置值为准
//,'paragraph':{'p':'', 'h1':'', 'h2':'', 'h3':'', 'h4':'', 'h5':'', 'h6':''}
//rowspacingtop
//段间距 值和显示的名字相同
//,'rowspacingtop':['5', '10', '15', '20', '25']
//rowspacingBottom
//段间距 值和显示的名字相同
//,'rowspacingbottom':['5', '10', '15', '20', '25']
//lineheight
//行内间距 值和显示的名字相同
//,'lineheight':['1', '1.5','1.75','2', '3', '4', '5']
//customstyle
//自定义样式,不支持国际化,此处配置值即可最后显示值
//block的元素是依据设置段落的逻辑设置的,inline的元素依据BIU的逻辑设置
//尽量使用一些常用的标签
//参数说明
//tag 使用的标签名字
//label 显示的名字也是用来标识不同类型的标识符,注意这个值每个要不同,
//style 添加的样式
//每一个对象就是一个自定义的样式
//,'customstyle':[
// {tag:'h1', name:'tc', label:'', style:'border-bottom:#ccc 2px solid;padding:0 4px 0 0;text-align:center;margin:0 0 20px 0;'},
// {tag:'h1', name:'tl',label:'', style:'border-bottom:#ccc 2px solid;padding:0 4px 0 0;margin:0 0 10px 0;'},
// {tag:'span',name:'im', label:'', style:'font-style:italic;font-weight:bold'},
// {tag:'span',name:'hi', label:'', style:'font-style:italic;font-weight:bold;color:rgb(51, 153, 204)'}
//]
//打开右键菜单功能
//,enableContextMenu: true
//右键菜单的内容,可以参考plugins/contextmenu.js里边的默认菜单的例子,label留空支持国际化,否则以此配置为准
//,contextMenu:[
// {
// label:'', //显示的名称
// cmdName:'selectall',//执行的command命令,当点击这个右键菜单时
// //exec可选,有了exec就会在点击时执行这个function,优先级高于cmdName
// exec:function () {
// //this是当前编辑器的实例
// //this.ui._dialogs['inserttableDialog'].open();
// }
// }
//]
//快捷菜单
//,shortcutMenu:["fontfamily", "fontsize", "bold", "italic", "underline", "forecolor", "backcolor", "insertorderedlist", "insertunorderedlist"]
//elementPathEnabled
//是否启用元素路径,默认是显示
//,elementPathEnabled : true
//wordCount
//,wordCount:true //是否开启字数统计
//,maximumWords:10000 //允许的最大字符数
//字数统计提示,{#count}代表当前字数,{#leave}代表还可以输入多少字符数,留空支持多语言自动切换,否则按此配置显示
//,wordCountMsg:'' //当前已输入 {#count} 个字符,您还可以输入{#leave} 个字符
//超出字数限制提示 留空支持多语言自动切换,否则按此配置显示
//,wordOverFlowMsg:'' //<span style="color:red;">你输入的字符个数已经超出最大允许值,服务器可能会拒绝保存!</span>
//tab
//点击tab键时移动的距离,tabSize倍数,tabNode什么字符做为单位
//,tabSize:4
//,tabNode:' '
//removeFormat
//清除格式时可以删除的标签和属性
//removeForamtTags标签
//,removeFormatTags:'b,big,code,del,dfn,em,font,i,ins,kbd,q,samp,small,span,strike,strong,sub,sup,tt,u,var'
//removeFormatAttributes属性
//,removeFormatAttributes:'class,style,lang,width,height,align,hspace,valign'
//undo
//可以最多回退的次数,默认20
//,maxUndoCount:20
//当输入的字符数超过该值时,保存一次现场
//,maxInputCount:1
//autoHeightEnabled
// 是否自动长高,默认true
//,autoHeightEnabled:true
//scaleEnabled
//是否可以拉伸长高,默认true(当开启时,自动长高失效)
//,scaleEnabled:false
//,minFrameWidth:800 //编辑器拖动时最小宽度,默认800
//,minFrameHeight:220 //编辑器拖动时最小高度,默认220
//autoFloatEnabled
//是否保持toolbar的位置不动,默认true
//,autoFloatEnabled:true
//浮动时工具栏距离浏览器顶部的高度,用于某些具有固定头部的页面
//,topOffset:30
//编辑器底部距离工具栏高度(如果参数大于等于编辑器高度,则设置无效)
//,toolbarTopOffset:400
//设置远程图片是否抓取到本地保存
//,catchRemoteImageEnable: true //设置是否抓取远程图片
//pageBreakTag
//分页标识符,默认是_ueditor_page_break_tag_
//,pageBreakTag:'_ueditor_page_break_tag_'
//autotypeset
//自动排版参数
//,autotypeset: {
// mergeEmptyline: true, //合并空行
// removeClass: true, //去掉冗余的class
// removeEmptyline: false, //去掉空行
// textAlign:"left", //段落的排版方式,可以是 left,right,center,justify 去掉这个属性表示不执行排版
// imageBlockLine: 'center', //图片的浮动方式,独占一行剧中,左右浮动,默认: center,left,right,none 去掉这个属性表示不执行排版
// pasteFilter: false, //根据规则过滤没事粘贴进来的内容
// clearFontSize: false, //去掉所有的内嵌字号,使用编辑器默认的字号
// clearFontFamily: false, //去掉所有的内嵌字体,使用编辑器默认的字体
// removeEmptyNode: false, // 去掉空节点
// //可以去掉的标签
// removeTagNames: {标签名字:1},
// indent: false, // 行首缩进
// indentValue : '2em', //行首缩进的大小
// bdc2sb: false,
// tobdc: false
//}
//tableDragable
//表格是否可以拖拽
//,tableDragable: true
//sourceEditor
//源码的查看方式,codemirror 是代码高亮,textarea是文本框,默认是codemirror
//注意默认codemirror只能在ie8+和非ie中使用
//,sourceEditor:"codemirror"
//如果sourceEditor是codemirror,还用配置一下两个参数
//codeMirrorJsUrl js加载的路径,默认是 URL + "third-party/codemirror/codemirror.js"
//,codeMirrorJsUrl:URL + "third-party/codemirror/codemirror.js"
//codeMirrorCssUrl css加载的路径,默认是 URL + "third-party/codemirror/codemirror.css"
//,codeMirrorCssUrl:URL + "third-party/codemirror/codemirror.css"
//编辑器初始化完成后是否进入源码模式,默认为否。
//,sourceEditorFirst:false
//iframeUrlMap
//dialog内容的路径 ~会被替换成URL,垓属性一旦打开,将覆盖所有的dialog的默认路径
//,iframeUrlMap:{
// 'anchor':'~/dialogs/anchor/anchor.html',
//}
//allowLinkProtocol 允许的链接地址,有这些前缀的链接地址不会自动添加http
//, allowLinkProtocols: ['http:', 'https:', '#', '/', 'ftp:', 'mailto:', 'tel:', 'git:', 'svn:']
//webAppKey 百度应用的APIkey,每个站长必须首先去百度官网注册一个key后方能正常使用app功能,注册介绍,http://app.baidu.com/static/cms/getapikey.html
//, webAppKey: ""
//默认过滤规则相关配置项目
//,disabledTableInTable:true //禁止表格嵌套
//,allowDivTransToP:true //允许进入编辑器的div标签自动变成p标签
//,rgb2Hex:true //默认产出的数据中的color自动从rgb格式变成16进制格式
// xss 过滤是否开启,inserthtml等操作
,
xssFilterRules: true
//input xss过滤
,
inputXssFilter: true
//output xss过滤
,
outputXssFilter: true
// xss过滤白名单 名单来源: https://raw.githubusercontent.com/leizongmin/js-xss/master/lib/default.js
,
whitList: {
a: ['target', 'href', 'title', 'class', 'style'],
abbr: ['title', 'class', 'style'],
address: ['class', 'style'],
area: ['shape', 'coords', 'href', 'alt'],
article: [],
aside: [],
audio: ['autoplay', 'controls', 'loop', 'preload', 'src', 'class', 'style'],
b: ['class', 'style'],
bdi: ['dir'],
bdo: ['dir'],
big: [],
blockquote: ['cite', 'class', 'style'],
br: [],
caption: ['class', 'style'],
center: [],
cite: [],
code: ['class', 'style'],
col: ['align', 'valign', 'span', 'width', 'class', 'style'],
colgroup: ['align', 'valign', 'span', 'width', 'class', 'style'],
dd: ['class', 'style'],
del: ['datetime'],
details: ['open'],
div: ['class', 'style'],
dl: ['class', 'style'],
dt: ['class', 'style'],
em: ['class', 'style'],
font: ['color', 'size', 'face'],
footer: [],
h1: ['class', 'style'],
h2: ['class', 'style'],
h3: ['class', 'style'],
h4: ['class', 'style'],
h5: ['class', 'style'],
h6: ['class', 'style'],
header: [],
hr: [],
i: ['class', 'style'],
img: ['src', 'alt', 'title', 'width', 'height', 'id', '_src', 'loadingclass', 'class', 'data-latex'],
ins: ['datetime'],
li: ['class', 'style'],
mark: [],
nav: [],
ol: ['class', 'style'],
p: ['class', 'style'],
pre: ['class', 'style'],
s: [],
section: [],
small: [],
span: ['class', 'style'],
sub: ['class', 'style'],
sup: ['class', 'style'],
strong: ['class', 'style'],
table: ['width', 'border', 'align', 'valign', 'class', 'style'],
tbody: ['align', 'valign', 'class', 'style'],
td: ['width', 'rowspan', 'colspan', 'align', 'valign', 'class', 'style'],
tfoot: ['align', 'valign', 'class', 'style'],
th: ['width', 'rowspan', 'colspan', 'align', 'valign', 'class', 'style'],
thead: ['align', 'valign', 'class', 'style'],
tr: ['rowspan', 'align', 'valign', 'class', 'style'],
tt: [],
u: [],
ul: ['class', 'style'],
video: ['autoplay', 'controls', 'loop', 'preload', 'src', 'height', 'width', 'class', 'style']
}
};
function getUEBasePath(docUrl, confUrl) {
return getBasePath(docUrl || self.document.URL || self.location.href, confUrl || getConfigFilePath());
}
function getConfigFilePath() {
var configPath = document.getElementsByTagName('script');
return configPath[configPath.length - 1].src;
}
function getBasePath(docUrl, confUrl) {
var basePath = confUrl;
if (/^(\/|\\\\)/.test(confUrl)) {
basePath = /^.+?\w(\/|\\\\)/.exec(docUrl)[0] + confUrl.replace(/^(\/|\\\\)/, '');
} else if (!/^[a-z]+:/i.test(confUrl)) {
docUrl = docUrl.split("#")[0].split("?")[0].replace(/[^\\\/]+$/, '');
basePath = docUrl + "" + confUrl;
}
console.log(basePath)
return optimizationPath(basePath);
}
function optimizationPath(path) {
var protocol = /^[a-z]+:\/\//.exec(path)[0],
tmp = null,
res = [];
path = path.replace(protocol, "").split("?")[0].split("#")[0];
path = path.replace(/\\/g, '/').split(/\//);
path[path.length - 1] = "";
while (path.length) {
if ((tmp = path.shift()) === "..") {
res.pop();
} else if (tmp !== ".") {
res.push(tmp);
}
}
return protocol + res.join("/");
}
window.UE = {
getUEBasePath: getUEBasePath
};
})();
|
#! /usr/bin/env python
##############################################################################
## DendroPy Phylogenetic Computing Library.
##
## Copyright 2010-2015 Jeet Sukumaran and Mark T. Holder.
## All rights reserved.
##
## See "LICENSE.rst" for terms and conditions of usage.
##
## If you use this work or any portion thereof in published work,
## please cite it as:
##
## Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
## for phylogenetic computing. Bioinformatics 26: 1569-1571.
##
##############################################################################
"""
Tests for NEWICK taxon handling.
"""
import sys
import os
import unittest
import dendropy
from dendropy.utility import error
from dendropy.test.support import dendropytest
from dendropy.dataio import nexusreader
from dendropy.dataio import nexusprocessing
class TaxonSymbolMappingTest(unittest.TestCase):
    """Tests for NexusTaxonSymbolMapper: resolving NEXUS/NEWICK symbols
    (labels, 1-based taxon numbers, TRANSLATE tokens) to Taxon objects.
    """
    def test_standard_lookup_and_create(self):
        """Lookups by label, 1-based number, or case-variant label all resolve
        to the same taxon; only the first lookup of a label creates one."""
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        for idx, label in enumerate(labels):
            self.assertEqual(len(tns), idx)
            t1 = tsm.require_taxon_for_symbol(label)
            self.assertEqual(len(tns), idx+1)
            self.assertEqual(t1.label, label)
            # Repeated lookup is idempotent: same object, namespace unchanged.
            t2 = tsm.require_taxon_for_symbol(label)
            self.assertEqual(len(tns), idx+1)
            self.assertIs(t1, t2)
            self.assertEqual(t2.label, label)
            # Lookup by 1-based taxon number resolves to the same taxon.
            t3 = tsm.require_taxon_for_symbol(str(idx+1))
            self.assertEqual(len(tns), idx+1)
            self.assertIs(t1, t3)
            self.assertEqual(t3.label, label)
            # Guard: the upper-cased variant must actually differ, so the
            # next lookup genuinely exercises case-insensitive matching.
            assert label.upper() != label
            t4 = tsm.require_taxon_for_symbol(label.upper())
            self.assertEqual(len(tns), idx+1)
            self.assertIs(t1, t4)
            self.assertEqual(t4.label, label)
    def test_no_number_lookup_and_create(self):
        # looking up a number symbol should result in new taxon creation
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns,
                enable_lookup_by_taxon_number=False)
        for idx, label in enumerate(labels):
            self.assertEqual(len(tns), idx)
            t1 = tsm.require_taxon_for_symbol(label)
            self.assertEqual(len(tns), idx+1)
            self.assertEqual(t1.label, label)
            t2 = tsm.require_taxon_for_symbol(label)
            self.assertEqual(len(tns), idx+1)
            self.assertIs(t1, t2)
            self.assertEqual(t2.label, label)
            # With number lookup disabled AND creation suppressed, a numeric
            # symbol must resolve to nothing and create nothing.
            t3 = tsm.lookup_taxon_symbol(str(idx+1), create_taxon_if_not_found=False)
            self.assertIs(t3, None)
            self.assertEqual(len(tns), idx+1)
    def test_no_number_lookup_and_create2(self):
        # looking up a number symbol should result in new taxon creation
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns,
                enable_lookup_by_taxon_number=False)
        taxa = []
        for label_idx, label in enumerate(labels):
            t = dendropy.Taxon(label)
            tsm.add_taxon(t)
            taxa.append(t)
        self.assertEqual(len(tns), len(labels))
        for label_idx, label in enumerate(labels):
            # Numeric symbols do not match existing taxa by position here, so
            # each lookup creates a brand-new taxon labeled with the number.
            t1 = tsm.require_taxon_for_symbol(label_idx+1)
            self.assertNotIn(t1, taxa)
            self.assertEqual(t1.label, str(label_idx+1))
            self.assertEqual(len(tns), len(labels)+label_idx+1)
    def test_new_taxon(self):
        """Taxa created via the mapper are findable by label and by number."""
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        for label_idx, label in enumerate(labels):
            t = tsm.new_taxon(label)
            self.assertEqual(len(tns), label_idx+1)
            self.assertEqual(t.label, label)
            self.assertIs(tsm.require_taxon_for_symbol(label), t)
            self.assertEqual(len(tns), label_idx+1)
            self.assertIs(tsm.require_taxon_for_symbol(str(label_idx+1)), t)
            self.assertEqual(len(tns), label_idx+1)
        self.assertEqual(len(tns), len(labels))
    def test_add_taxon(self):
        """Externally created taxa added to the mapper behave like new_taxon ones."""
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        for label_idx, label in enumerate(labels):
            t = dendropy.Taxon(label)
            tsm.add_taxon(t)
            self.assertEqual(len(tns), label_idx+1)
            self.assertEqual(t.label, label)
            self.assertIs(tsm.require_taxon_for_symbol(label), t)
            self.assertEqual(len(tns), label_idx+1)
            self.assertIs(tsm.require_taxon_for_symbol(str(label_idx+1)), t)
            self.assertEqual(len(tns), label_idx+1)
        self.assertEqual(len(tns), len(labels))
    def test_simple_token_lookup(self):
        """TRANSLATE tokens that equal each taxon's ordinal position resolve
        back to the registered taxa without creating new ones."""
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        translate = {}
        t_labels = {}
        for label_idx, label in enumerate(labels):
            t = dendropy.Taxon(label)
            t_labels[t] = t.label
            tsm.add_taxon(t)
            token = label_idx + 1
            translate[token] = t
            tsm.add_translate_token(token, t)
        self.assertEqual(len(tns), len(labels))
        for token in translate:
            t1 = translate[token]
            t2 = tsm.require_taxon_for_symbol(token)
            self.assertIs(t1, t2)
            self.assertEqual(t2.label, t_labels[t1])
        self.assertEqual(len(tns), len(labels))
    def test_tricky_token_lookup(self):
        """TRANSLATE tokens take precedence: here token N maps to the taxon at
        the *opposite* end of the list, so a positional interpretation of the
        token would return the wrong taxon."""
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        translate = {}
        t_labels = {}
        for label_idx, label in enumerate(labels):
            t = dendropy.Taxon(label)
            t_labels[t] = t.label
            tsm.add_taxon(t)
            # Reversed numbering: taxon 1 gets token "100", etc.
            token = str(len(labels) - label_idx)
            translate[token] = t
            tsm.add_translate_token(token, t)
        self.assertEqual(len(tns), len(labels))
        for token in translate:
            t1 = translate[token]
            t2 = tsm.require_taxon_for_symbol(token)
            self.assertIs(t1, t2)
            self.assertEqual(t2.label, t_labels[t1])
        self.assertEqual(len(tns), len(labels))
    def test_mixed_token_lookup(self):
        """Only every other taxon has a TRANSLATE token; numeric lookups must
        still resolve correctly for both tokenized and untokenized taxa."""
        labels = ["t{}".format(i) for i in range(1, 101)]
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        translate = {}
        t_labels = {}
        labels_t = {}
        for label_idx, label in enumerate(labels):
            t = dendropy.Taxon(label)
            t_labels[t] = t.label
            labels_t[t.label] = t
            tsm.add_taxon(t)
            if label_idx % 2 == 0:
                token = str(label_idx+1)
                translate[token] = t
                tsm.add_translate_token(token, t)
        self.assertEqual(len(tns), len(labels))
        for label_idx, label in enumerate(labels):
            token = label_idx + 1
            t1 = tsm.require_taxon_for_symbol(token)
            self.assertEqual(len(tns), len(labels))
            self.assertEqual(t1.label, label)
            self.assertIs(t1, labels_t[label])
            if token in translate:
                self.assertIs(t1, translate[token])
        self.assertEqual(len(tns), len(labels))
    def test_taxon_namespace_locking(self):
        """The mapper locks the namespace while it exists and unlocks it when
        the mapper is deleted."""
        tns = dendropy.TaxonNamespace()
        tsm = nexusprocessing.NexusTaxonSymbolMapper(taxon_namespace=tns)
        self.assertFalse(tns.is_mutable)
        del tsm
        self.assertTrue(tns.is_mutable)
class NexusTaxaCaseInsensitivityTest(unittest.TestCase):
    """Case-sensitivity handling of taxon labels when reading NEXUS data."""
    def setUp(self):
        # The interleaved matrix repeats each taxon label in lower case.
        # Under case-insensitive matching the upper/lower rows merge into the
        # same 5 taxa; under case-sensitive matching 'aaa' etc. are extra taxa
        # that exceed the declared NTAX=5.
        self.data_str = """\
#NEXUS
BEGIN TAXA;
    DIMENSIONS NTAX=5;
    TAXLABELS AAA BBB CCC DDD EEE;
END;
BEGIN CHARACTERS;
    DIMENSIONS NCHAR=8;
    FORMAT DATATYPE=DNA GAP=- MISSING=? MATCHCHAR=. INTERLEAVE;
    MATRIX
        AAA ACGT
        BBB ACGT
        CCC ACGT
        DDD ACGT
        EEE ACGT
        aaa ACGT
        bbb ACGT
        ccc ACGT
        ddd ACGT
        eee ACGT
    ;
END;
        """
    def testCaseInsensitiveChars(self):
        """Explicitly case-insensitive parsing merges case-variant labels."""
        d = dendropy.DnaCharacterMatrix.get_from_string(self.data_str, 'nexus', case_sensitive_taxon_labels=False)
        expected = ["AAA", "BBB", "CCC", "DDD", "EEE"]
        observed = [t.label.upper() for t in d.taxon_namespace]
        # Membership is checked both ways rather than comparing ordered lists.
        for i, x in enumerate(expected):
            self.assertTrue(x in observed)
        for i, x in enumerate(observed):
            self.assertTrue(x in expected)
        self.assertEqual(len(d.taxon_namespace), 5)
    def testCaseSensitiveChars(self):
        # With case-sensitive labels the lower-case rows are new taxa beyond
        # the declared NTAX, so parsing must fail.
        self.assertRaises(error.DataParseError,
                dendropy.DnaCharacterMatrix.get_from_string,
                self.data_str,
                'nexus',
                case_sensitive_taxon_labels=True)
    def testDefaultCaseSensitivityChars(self):
        """The default (no flag) behaves case-insensitively."""
        d = dendropy.DnaCharacterMatrix.get_from_string(self.data_str, 'nexus')
        expected = ["AAA", "BBB", "CCC", "DDD", "EEE"]
        observed = [t.label.upper() for t in d.taxon_namespace]
        for i, x in enumerate(expected):
            self.assertTrue(x in observed)
        for i, x in enumerate(observed):
            self.assertTrue(x in expected)
        self.assertEqual(len(d.taxon_namespace), 5)
class NexusTooManyTaxaTest(
        dendropytest.ExtendedTestCase):
    """Error handling when a TAXA block lists more labels than NTAX declares."""
    def testTooManyTaxaNonInterleaved(self):
        # Five labels against NTAX=2 must raise TooManyTaxaError.
        data_str = """\
        #NEXUS
        BEGIN TAXA;
            DIMENSIONS NTAX=2;
            TAXLABELS AAA BBB CCC DDD EEE;
        END;
        """
        self.assertRaises(nexusreader.NexusReader.TooManyTaxaError,
                dendropy.DnaCharacterMatrix.get_from_string,
                data_str,
                'nexus')
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules engine for checking crypto keys configuration."""
from builtins import object
from collections import namedtuple
import datetime
import threading
from google.cloud.forseti.common.gcp_type import resource_util
from google.cloud.forseti.common.util import logger, date_time, string_formats
from google.cloud.forseti.scanner.audit import base_rules_engine as bre
from google.cloud.forseti.scanner.audit import errors as audit_errors
# Module-level logger for this rules engine.
LOGGER = logger.get_logger(__name__)
# Violation type tag attached to every RuleViolation emitted by this engine.
VIOLATION_TYPE = 'CRYPTO_KEY_VIOLATION'
# Rule Modes.
WHITELIST = 'whitelist'
BLACKLIST = 'blacklist'
# The only rule modes accepted in rule definition files.
RULE_MODES = frozenset([BLACKLIST, WHITELIST])
class KMSRulesEngine(bre.BaseRulesEngine):
    """Rules engine for the KMS (crypto key) scanner.

    Lazily builds a KMSRuleBook from a rules definition file and checks
    crypto key resources against it.
    """

    def __init__(self, rules_file_path, snapshot_timestamp=None):
        """Initialize.

        Args:
            rules_file_path (str): file location of rules
            snapshot_timestamp (str): snapshot timestamp. Defaults to None.
                If set, this will be the snapshot timestamp
                used in the engine.
        """
        super(KMSRulesEngine, self).__init__(rules_file_path=rules_file_path)
        # The rule book is built on first use (see find_violations).
        self.rule_book = None
        self.snapshot_timestamp = snapshot_timestamp
        self._lock = threading.Lock()

    def build_rule_book(self, global_configs=None):
        """Build KMSRuleBook from the rules definition file.

        Args:
            global_configs (dict): Global configurations.
        """
        # Serialize rebuilds so concurrent scanners don't race on rule_book.
        with self._lock:
            self.rule_book = KMSRuleBook(self._load_rule_definitions())

    def find_violations(self, key, force_rebuild=False):
        """Determine whether crypto key configuration violates rules.

        Args:
            key (CryptoKey): A crypto key resource to check.
            force_rebuild (bool): If True, rebuilds the rule book. This will
                reload the rules definition file and add the rules to the book.

        Returns:
            generator: A generator of rule violations.
        """
        if force_rebuild or self.rule_book is None:
            self.build_rule_book()
        return self.rule_book.find_violations(key)

    def add_rules(self, rules):
        """Add rules to the rule book.

        Args:
            rules (list): The list of rules to add to the book.
        """
        if self.rule_book is not None:
            self.rule_book.add_rules(rules)
class KMSRuleBook(bre.BaseRuleBook):
    """The RuleBook for crypto key rules.

    Maps GCP resources to the ResourceRules that apply to them and
    dispatches violation checks for crypto keys.
    """

    # Resource types allowed in a rule definition's 'resource.type' field.
    supported_resource_types = frozenset([
        'organization'
    ])

    def __init__(self, rule_defs=None):
        """Initialization.

        Args:
            rule_defs (list): CryptoKeys rule definition dicts
        """
        super(KMSRuleBook, self).__init__()
        self._lock = threading.Lock()
        # Maps gcp_type Resource -> ResourceRules for that resource.
        self.resource_rules_map = {}
        if not rule_defs:
            self.rule_defs = {}
        else:
            self.rule_defs = rule_defs
            self.add_rules(rule_defs)

    def __eq__(self, other):
        """Equals.

        Args:
            other (object): Object to compare.
        Returns:
            bool: True or False.
        """
        if not isinstance(other, type(self)):
            return NotImplemented
        return self.resource_rules_map == other.resource_rules_map

    def __ne__(self, other):
        """Not Equals.

        Args:
            other (object): Object to compare.
        Returns:
            bool: True or False.
        """
        return not self == other

    def __repr__(self):
        """Object representation.

        Returns:
            str: The object representation.
        """
        return 'KMSRuleBook <{}>'.format(self.resource_rules_map)

    def add_rules(self, rule_defs):
        """Add rules to the rule book.

        Args:
            rule_defs (dict): rule definitions dictionary
        """
        for (i, rule) in enumerate(rule_defs.get('rules', [])):
            self.add_rule(rule, i)

    def add_rule(self, rule_def, rule_index):
        """Add a rule to the rule book.

        Args:
            rule_def (dict): A dictionary containing rule definition
                properties.
            rule_index (int): The index of the rule from the rule definitions.
                Assigned automatically when the rule book is built.

        Raises:
            InvalidRulesSchemaError: if the rule definition is missing
                required fields or uses an unsupported mode/resource type.
        """
        resources = rule_def.get('resource')
        mode = rule_def.get('mode')
        key = rule_def.get('key')
        if not resources or key is None or mode not in RULE_MODES:
            raise audit_errors.InvalidRulesSchemaError(
                'Faulty rule {}'.format(rule_index))
        for resource in resources:
            resource_type = resource.get('type')
            resource_ids = resource.get('resource_ids')
            if resource_type not in self.supported_resource_types:
                raise audit_errors.InvalidRulesSchemaError(
                    'Invalid resource type in rule {}'.format(rule_index))
            if not resource_ids or len(resource_ids) < 1:
                raise audit_errors.InvalidRulesSchemaError(
                    'Missing resource ids in rule {}'.format(rule_index))
            # For each resource id associated with the rule, create a
            # mapping of resource => rules.
            for resource_id in resource_ids:
                gcp_resource = resource_util.create_resource(
                    resource_id=resource_id,
                    resource_type=resource_type)
                rule_def_resource = {
                    'key': key,
                    'mode': mode
                }
                rule = Rule(rule_name=rule_def.get('name'),
                            rule_index=rule_index,
                            rule=rule_def_resource)
                resource_rules = self.resource_rules_map.setdefault(
                    gcp_resource, ResourceRules(resource=gcp_resource))
                # BUGFIX: removed a dead branch that re-keyed the map by
                # rule_index when `resource_rules` was falsy. dict.setdefault
                # always returns a (truthy) ResourceRules instance here, and
                # keying by rule index would have corrupted the
                # resource -> ResourceRules mapping if it had ever run.
                if rule not in resource_rules.rules:
                    resource_rules.rules.add(rule)

    def get_resource_rules(self, resource):
        """Get all the resource rules for resource.

        Args:
            resource (Resource): The gcp_type Resource find in the map.

        Returns:
            ResourceRules: A ResourceRules object, or None if the resource
                has no rules associated with it.
        """
        return self.resource_rules_map.get(resource)

    def find_violations(self, key):
        """Find crypto key violations in the rule book.

        Walks the key's resource ancestry and applies both exact-id and
        wildcard ('*') rules for each ancestor.

        Args:
            key (CryptoKey): The GCP resource to check for violations.

        Returns:
            RuleViolation: resource crypto key rule violations.
        """
        LOGGER.debug('Looking for crypto key violations: %s',
                     key.name)
        violations = []
        resource_ancestors = resource_util.get_ancestors_from_full_name(
            key.crypto_key_full_name)
        LOGGER.debug('Ancestors of resource: %r', resource_ancestors)
        checked_wildcards = set()
        for curr_resource in resource_ancestors:
            if not curr_resource:
                # The leaf node in the hierarchy
                continue
            resource_rule = self.get_resource_rules(curr_resource)
            if resource_rule:
                violations.extend(
                    resource_rule.find_violations(key))
            # Each wildcard rule is only evaluated once per resource type.
            wildcard_resource = resource_util.create_resource(
                resource_id='*', resource_type=curr_resource.type)
            if wildcard_resource in checked_wildcards:
                continue
            checked_wildcards.add(wildcard_resource)
            resource_rule = self.get_resource_rules(wildcard_resource)
            if resource_rule:
                violations.extend(
                    resource_rule.find_violations(key))
        LOGGER.debug('Returning violations: %r', violations)
        return violations
class ResourceRules(object):
    """An association of a GCP resource with the rules that apply to it."""

    def __init__(self,
                 resource=None,
                 rules=None):
        """Initialize.

        Args:
            resource (Resource): The resource to associate with the rule.
            rules (set): rules to associate with the resource. Any iterable
                is accepted and coerced to a set; None means no rules.
        """
        # BUGFIX: the previous check (`if not isinstance(rules, set)`)
        # silently *discarded* rules passed as any non-set iterable (e.g. a
        # list) and replaced them with an empty set. Coerce instead.
        if rules is None:
            rules = set()
        elif not isinstance(rules, set):
            rules = set(rules)
        self.resource = resource
        self.rules = rules

    def find_violations(self, key):
        """Determine if the policy binding matches this rule's criteria.

        Args:
            key (CryptoKey): crypto key resource.

        Returns:
            list: RuleViolation
        """
        violations = []
        for rule in self.rules:
            rule_violations = rule.find_violations(key)
            if rule_violations:
                violations.extend(rule_violations)
        return violations

    def __eq__(self, other):
        """Compare == with another object.

        Args:
            other (ResourceRules): object to compare with

        Returns:
            int: comparison result
        """
        if not isinstance(other, type(self)):
            return NotImplemented
        return (self.resource == other.resource and
                self.rules == other.rules)

    def __ne__(self, other):
        """Compare != with another object.

        Args:
            other (object): object to compare with

        Returns:
            int: comparison result
        """
        return not self == other

    def __repr__(self):
        """String representation of this node.

        Returns:
            str: debug string
        """
        return 'KMSResourceRules<resource={}, rules={}>'.format(
            self.resource, self.rules)
class Rule(object):
    """Rule properties from the rule definition file, also finds violations."""
    def __init__(self, rule_name, rule_index, rule):
        """Initialize.
        Args:
            rule_name (str): Name of the loaded rule.
            rule_index (int): The index of the rule from the rule definitions.
            rule (dict): The rule definition from the file; has keys
                'key' (the crypto key criteria list) and 'mode'.
        """
        self.rule_name = rule_name
        self.rule_index = rule_index
        self.rule = rule
    @classmethod
    def find_match_rotation_period(cls, key, rotation_period, mode):
        """Check if there is a match for this rule rotation period against the
        given resource.
        If the mode is whitelist and days since the key was last rotated is less
        than or equals to the rotation period specified then there is no
        violation.
        If the mode is blacklist and days since the key was last rotated is
        greater than the rotation period specified then there is a violation.
        Args:
            key (Resource): The resource to check for a match.
            rotation_period (string): The cut off rotation schedule of crypto
                key specified in rule file.
            mode (string): The mode specified in the rule.
        Returns:
            bool: Returns true if a match is found.
        """
        LOGGER.debug('Formatting rotation time...')
        creation_time = key.primary_version.get('createTime')
        scan_time = date_time.get_utc_now_datetime()
        # Drops the last 5 characters of the timestamp before parsing with
        # TIMESTAMP_MICROS. NOTE(review): assumes a fixed-width suffix
        # (e.g. a trailing zone designator) — confirm against the actual
        # KMS API createTime format.
        last_rotation_time = creation_time[:-5]
        formatted_last_rotation_time = datetime.datetime.strptime(
            last_rotation_time, string_formats.TIMESTAMP_MICROS)
        days_since_rotated = (scan_time - formatted_last_rotation_time).days
        # NOTE(review): compares an int day count against rotation_period,
        # which the docstring says is a string — presumably the rules loader
        # yields a numeric value here; verify, else this raises TypeError
        # on Python 3.
        if mode == BLACKLIST and days_since_rotated > rotation_period:
            return True
        elif mode == WHITELIST and days_since_rotated <= rotation_period:
            return True
        return False
    @classmethod
    def find_match_algorithms(cls, key, rule_algorithms):
        """Check if there is a match for this rule algorithm against the given
        resource.
        Args:
            key (Resource): The resource to check for a match.
            rule_algorithms (string): The algorithms of this rule.
        Returns:
            bool: Returns true if the key's primary version algorithm equals
                any algorithm listed in the rule.
        """
        LOGGER.debug('Checking if the algorithm specified matches with that of'
                     ' crypto key.')
        key_algorithm = key.primary_version.get('algorithm')
        for algorithm in rule_algorithms:
            if key_algorithm == algorithm:
                return True
        return False
    @classmethod
    def find_match_protection_level(cls, key, rule_protection_level):
        """Check if there is a match for this rule protection level against the
        given resource.
        Args:
            key (Resource): The resource to check for a match.
            rule_protection_level (string): The protection level of this rule.
        Returns:
            bool: Returns true if a match is found.
        """
        key_protection_level = key.primary_version.get('protectionLevel')
        if key_protection_level == rule_protection_level:
            return True
        return False
    @classmethod
    def find_match_purpose(cls, key, rule_purpose):
        """Check if there is a match for this rule purpose against the given
        resource.
        Args:
            key (Resource): The resource to check for a match.
            rule_purpose (list): The purpose of this rule.
        Returns:
            bool: Returns true if the key's purpose equals any purpose listed
                in the rule.
        """
        key_purpose = key.purpose
        for purpose in rule_purpose:
            if key_purpose == purpose:
                return True
        return False
    @classmethod
    def find_match_state(cls, key, rule_state):
        """Check if there is a match for this rule state against the given
        resource.
        Args:
            key (Resource): The resource to check for a match.
            rule_state (list): The state of this rule.
        Returns:
            bool: Returns true if the key's primary version state equals any
                state listed in the rule.
        """
        key_state = key.primary_version.get('state')
        for state in rule_state:
            if state == key_state:
                return True
        return False
    def find_violations(self, key):
        """Find violations for this rule against the given resource.
        Only keys whose primary version is ENABLED are evaluated. For each
        criteria entry in the rule, all specified fields must match
        (logical AND); blacklist mode flags a full match, whitelist mode
        flags anything that is not a full match.
        Args:
            key (Resource): The resource to check for violations.
        Returns:
            list: Returns a list of RuleViolation named tuples.
        """
        violations = []
        state = key.primary_version.get('state')
        # Disabled/destroyed keys are out of scope for this rule.
        if not state == 'ENABLED':
            return violations
        mode = self.rule['mode']
        crypto_key_rule = self.rule['key']
        for key_data in crypto_key_rule:
            has_violation = False
            rule_algorithms = key_data.get('algorithms')
            rule_protection_level = key_data.get('protection_level')
            rule_purpose = key_data.get('purpose')
            rule_state = key_data.get('state')
            rotation_period = key_data.get('rotation_period')
            # Unspecified fields are treated as matching (AND over the
            # fields that are present).
            all_matched = True
            if rotation_period:
                all_matched = all_matched and self.find_match_rotation_period(
                    key, rotation_period, mode)
            if rule_algorithms:
                all_matched = all_matched and self.find_match_algorithms(
                    key, rule_algorithms)
            if rule_protection_level:
                all_matched = all_matched and self.find_match_protection_level(
                    key, rule_protection_level)
            if rule_purpose:
                all_matched = all_matched and self.find_match_purpose(
                    key, rule_purpose)
            if rule_state:
                all_matched = all_matched and self.find_match_state(
                    key, rule_state)
            if mode == BLACKLIST and all_matched:
                has_violation = True
            elif mode == WHITELIST and not all_matched:
                has_violation = True
            if has_violation:
                violations.append(RuleViolation(
                    resource_id=key.id,
                    resource_type=key.type,
                    resource_name=key.id,
                    full_name=key.crypto_key_full_name,
                    rule_index=self.rule_index,
                    rule_name=self.rule_name,
                    violation_type=VIOLATION_TYPE,
                    primary_version=key.primary_version,
                    next_rotation_time=key.next_rotation_time,
                    rotation_period=key.rotation_period,
                    state=key.primary_version.get('state'),
                    algorithm=key.primary_version.get('algorithm'),
                    protection_level=key.primary_version.get('protectionLevel'),
                    purpose=key.purpose,
                    key_creation_time=key.create_time,
                    resource_data=key.data))
        return violations
    def __eq__(self, other):
        """Test whether Rule equals other Rule.
        Args:
            other (Rule): object to compare to
        Returns:
            int: comparison result
        """
        if not isinstance(other, type(self)):
            return NotImplemented
        return (self.rule_name == other.rule_name and
                self.rule_index == other.rule_index)
    def __ne__(self, other):
        """Test whether Rule is not equal to another Rule.
        Args:
            other (object): object to compare to
        Returns:
            int: comparison result
        """
        return not self == other
    def __hash__(self):
        """Make a hash of the rule index.
        Note: consistent with __eq__ only insofar as equal rules share a
        rule_index (rule_name is ignored by the hash, which is allowed).
        Returns:
            int: The hash of the rule index.
        """
        return hash(self.rule_index)
# pylint: enable=inconsistent-return-statements
# Rule violation produced by Rule.find_violations. Field types:
# resource_type: string
# resource_id: string
# resource_name: string
# primary_version: string
# next_rotation_time: string
# rule_name: string
# rule_index: int
# full_name: string
# violation_type: CRYPTO_KEY_VIOLATION
# state: string
# purpose: string
# algorithm: string
# protection_level: string
# rotation_period: string
# key_creation_time: string
# resource_data: string
RuleViolation = namedtuple('RuleViolation',
                           ['resource_id', 'resource_type', 'resource_name',
                            'full_name', 'rule_index', 'rule_name',
                            'violation_type', 'state',
                            'primary_version', 'next_rotation_time',
                            'rotation_period', 'key_creation_time',
                            'algorithm', 'protection_level',
                            'purpose', 'resource_data'])
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_change_circle_twotone = void 0;
var ic_change_circle_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12,4c4.41,0,8,3.59,8,8s-3.59,8-8,8s-8-3.59-8-8S7.59,4,12,4 M12.06,13.34v2.14 c-0.92,0.02-1.84-0.31-2.54-1.01c-1.12-1.12-1.3-2.8-0.59-4.13l-1.1-1.1c-1.28,1.94-1.07,4.59,0.64,6.29C9.44,16.51,10.72,17,12,17 c0.02,0,0.04,0,0.06,0V19l2.83-2.83L12.06,13.34z M15.54,8.46c-0.99-0.99-2.3-1.46-3.6-1.45V5L9.11,7.83l2.83,2.83V8.51 c0.02,0,0.04,0,0.06,0c0.9,0,1.79,0.34,2.48,1.02c1.12,1.12,1.3,2.8,0.59,4.13l1.1,1.1C17.45,12.82,17.24,10.17,15.54,8.46z",
"opacity": ".3"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12,4c4.41,0,8,3.59,8,8s-3.59,8-8,8s-8-3.59-8-8S7.59,4,12,4 M12,2C6.48,2,2,6.48,2,12s4.48,10,10,10s10-4.48,10-10 S17.52,2,12,2L12,2z M12.06,13.34v2.14c-0.92,0.02-1.84-0.31-2.54-1.01c-1.12-1.12-1.3-2.8-0.59-4.13l-1.1-1.1 c-1.28,1.94-1.07,4.59,0.64,6.29C9.44,16.51,10.72,17,12,17c0.02,0,0.04,0,0.06,0V19l2.83-2.83L12.06,13.34z M15.54,8.46 c-0.99-0.99-2.3-1.46-3.6-1.45V5L9.11,7.83l2.83,2.83V8.51c0.02,0,0.04,0,0.06,0c0.9,0,1.79,0.34,2.48,1.02 c1.12,1.12,1.3,2.8,0.59,4.13l1.1,1.1C17.45,12.82,17.24,10.17,15.54,8.46z"
},
"children": []
}]
};
exports.ic_change_circle_twotone = ic_change_circle_twotone;
|
import { ShaderLib } from 'three';
import BaseAnimationMaterial from './BaseAnimationMaterial';
/**
 * Extends THREE.PointsMaterial with custom shader chunks.
 *
 * @param {Object} parameters Object containing material properties and custom shader chunks.
 * @constructor
 */
function PointsAnimationMaterial(parameters) {
  // Shader chunk collection points; BaseAnimationMaterial fills them from
  // `parameters`, and they are spliced into the shader sources below.
  // fragmentShape: use the fragment shader to shape each point,
  // reference: https://thebookofshaders.com/07/
  var chunkNames = [
    'varyingParameters',
    'vertexFunctions',
    'vertexParameters',
    'vertexInit',
    'vertexPosition',
    'vertexColor',
    'fragmentFunctions',
    'fragmentParameters',
    'fragmentInit',
    'fragmentMap',
    'fragmentDiffuse',
    'fragmentShape'
  ];
  for (var i = 0; i < chunkNames.length; i++) {
    this[chunkNames[i]] = [];
  }
  BaseAnimationMaterial.call(this, parameters, ShaderLib['points'].uniforms);
  this.vertexShader = this.concatVertexShader();
  this.fragmentShader = this.concatFragmentShader();
}
// Standard prototypal inheritance from BaseAnimationMaterial.
PointsAnimationMaterial.prototype = Object.create(BaseAnimationMaterial.prototype);
PointsAnimationMaterial.prototype.constructor = PointsAnimationMaterial;
// Build the vertex shader source: THREE's points vertex shader with the
// user-supplied chunks spliced in around the standard #include sections.
// The template literal below is emitted verbatim as GLSL — do not reformat.
PointsAnimationMaterial.prototype.concatVertexShader = function () {
  return `
  uniform float size;
  uniform float scale;
  #include <common>
  #include <color_pars_vertex>
  #include <fog_pars_vertex>
  #include <shadowmap_pars_vertex>
  #include <logdepthbuf_pars_vertex>
  #include <clipping_planes_pars_vertex>
  ${this.stringifyChunk('vertexParameters')}
  ${this.stringifyChunk('varyingParameters')}
  ${this.stringifyChunk('vertexFunctions')}
  void main() {
    ${this.stringifyChunk('vertexInit')}
    #include <color_vertex>
    #include <begin_vertex>
    ${this.stringifyChunk('vertexPosition')}
    ${this.stringifyChunk('vertexColor')}
    #include <project_vertex>
    #ifdef USE_SIZEATTENUATION
      gl_PointSize = size * ( scale / - mvPosition.z );
    #else
      gl_PointSize = size;
    #endif
    #include <logdepthbuf_vertex>
    #include <clipping_planes_vertex>
    #include <worldpos_vertex>
    #include <shadowmap_vertex>
    #include <fog_vertex>
  }`;
};
// Build the fragment shader source. A custom 'fragmentMap' chunk replaces
// THREE's default map_particle_fragment include; 'fragmentShape' runs after
// the color is written, so it can discard/shape the point.
// The template literal below is emitted verbatim as GLSL — do not reformat.
PointsAnimationMaterial.prototype.concatFragmentShader = function () {
  return `
  uniform vec3 diffuse;
  uniform float opacity;
  #include <common>
  #include <packing>
  #include <color_pars_fragment>
  #include <map_particle_pars_fragment>
  #include <fog_pars_fragment>
  #include <shadowmap_pars_fragment>
  #include <logdepthbuf_pars_fragment>
  #include <clipping_planes_pars_fragment>
  ${this.stringifyChunk('fragmentParameters')}
  ${this.stringifyChunk('varyingParameters')}
  ${this.stringifyChunk('fragmentFunctions')}
  void main() {
    ${this.stringifyChunk('fragmentInit')}
    #include <clipping_planes_fragment>
    vec3 outgoingLight = vec3( 0.0 );
    vec4 diffuseColor = vec4( diffuse, opacity );
    ${this.stringifyChunk('fragmentDiffuse')}
    #include <logdepthbuf_fragment>
    ${(this.stringifyChunk('fragmentMap') || '#include <map_particle_fragment>')}
    #include <color_fragment>
    #include <alphatest_fragment>
    outgoingLight = diffuseColor.rgb;
    gl_FragColor = vec4( outgoingLight, diffuseColor.a );
    ${this.stringifyChunk('fragmentShape')}
    #include <premultiplied_alpha_fragment>
    #include <tonemapping_fragment>
    #include <encodings_fragment>
    #include <fog_fragment>
  }`;
};
export { PointsAnimationMaterial };
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Built-in linear model classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import activations
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import nn
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.experimental.LinearModel')
class LinearModel(training.Model):
  r"""Linear Model for regression and classification problems.
  This model approximates the following function:
  $$y = \beta + \sum_{i=1}^{N} w_{i} * x_{i}$$
  where $$\beta$$ is the bias and $$w_{i}$$ is the weight for each feature.
  Example:
  ```python
  model = LinearModel()
  model.compile(optimizer='sgd', loss='mse')
  model.fit(x, y, epochs=epochs)
  ```
  This model accepts sparse float inputs as well:
  Example:
  ```python
  model = LinearModel()
  opt = tf.keras.optimizers.Adam()
  loss_fn = tf.keras.losses.MeanSquaredError()
  with tf.GradientTape() as tape:
    output = model(sparse_input)
    loss = tf.reduce_mean(loss_fn(target, output))
  grads = tape.gradient(loss, model.weights)
  opt.apply_gradients(zip(grads, model.weights))
  ```
  """
  def __init__(self,
               units=1,
               activation=None,
               use_bias=True,
               kernel_initializer='zeros',
               bias_initializer='zeros',
               kernel_regularizer=None,
               bias_regularizer=None,
               **kwargs):
    """Create a Linear Model.
    Args:
      units: Positive integer, output dimension without the batch size.
      activation: Activation function to use.
        If you don't specify anything, no activation is applied.
      use_bias: whether to calculate the bias/intercept for this model. If set
        to False, no bias/intercept will be used in calculations, e.g., the data
        is already centered.
      kernel_initializer: Initializer for the `kernel` weights matrices.
      bias_initializer: Initializer for the bias vector.
      kernel_regularizer: regularizer for kernel vectors.
      bias_regularizer: regularizer for bias vector.
      **kwargs: The keyword arguments that are passed on to BaseLayer.__init__.
    """
    self.units = units
    self.activation = activations.get(activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    super(LinearModel, self).__init__(**kwargs)
    # Usage-tracking gauge for premade models.
    base_layer.keras_premade_model_gauge.get_cell('Linear').set(True)
  def build(self, input_shape):
    """Create one bias-free Dense layer per input, plus an optional shared bias.
    Accepts a dict of shapes keyed by input name, a tuple/list of
    TensorShapes, or a single shape.
    """
    if isinstance(input_shape, dict):
      # Sort keys so the layer order (and thus weight order) is deterministic.
      names = sorted(list(input_shape.keys()))
      self.input_specs = []
      self.dense_layers = []
      for name in names:
        shape = input_shape[name]
        layer = core.Dense(
            units=self.units,
            use_bias=False,
            kernel_initializer=self.kernel_initializer,
            kernel_regularizer=self.kernel_regularizer,
            name=name)
        layer.build(shape)
        self.input_specs.append(
            input_spec.InputSpec(shape=shape, name=name))
        self.dense_layers.append(layer)
    elif isinstance(input_shape, (tuple, list)) and all(
        isinstance(shape, tensor_shape.TensorShape) for shape in input_shape):
      self.dense_layers = []
      for shape in input_shape:
        layer = core.Dense(
            units=self.units,
            use_bias=False,
            kernel_initializer=self.kernel_initializer,
            kernel_regularizer=self.kernel_regularizer)
        layer.build(shape)
        self.dense_layers.append(layer)
    else:
      # input_shape can be a single TensorShape or a tuple of ints.
      layer = core.Dense(
          units=self.units,
          use_bias=False,
          kernel_initializer=self.kernel_initializer,
          kernel_regularizer=self.kernel_regularizer)
      layer.build(input_shape)
      self.dense_layers = [layer]
    if self.use_bias:
      # Single bias shared across all inputs; the per-input Dense layers are
      # built with use_bias=False above.
      self.bias = self.add_weight(
          'bias',
          shape=self.units,
          initializer=self.bias_initializer,
          regularizer=self.bias_regularizer,
          dtype=self.dtype,
          trainable=True)
    else:
      self.bias = None
    self.built = True
  def call(self, inputs):
    """Sum the per-input Dense outputs, add bias, apply activation."""
    result = None
    if isinstance(inputs, dict):
      # Layers were named after the (sorted) input keys in build(); use those
      # names to validate and order the incoming dict.
      names = [layer.name for layer in self.dense_layers]
      different_keys = set(names) - set(inputs.keys())
      if different_keys:
        raise ValueError(
            'The input dictionary does not match '
            'the structure expected by the model.'
            '\n\tExpected keys: {}'
            '\n\tReceived keys: {}'
            '\n\tMissing keys: {}'.format(set(names), set(inputs.keys()),
                                          different_keys))
      inputs = [inputs[name] for name in names]
      for inp, layer in zip(inputs, self.dense_layers):
        output = layer(inp)
        if result is None:
          result = output
        else:
          result += output
    elif isinstance(inputs, (tuple, list)):
      for inp, layer in zip(inputs, self.dense_layers):
        output = layer(inp)
        if result is None:
          result = output
        else:
          result += output
    else:
      result = self.dense_layers[0](inputs)
    if self.use_bias:
      result = nn.bias_add(result, self.bias)
    if self.activation is not None:
      return self.activation(result)  # pylint: disable=not-callable
    return result
  def get_config(self):
    config = {
        'units': self.units,
        'activation': activations.serialize(self.activation),
        'use_bias': self.use_bias,
        'kernel_initializer': initializers.serialize(self.kernel_initializer),
        'bias_initializer': initializers.serialize(self.bias_initializer),
        'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
        'bias_regularizer': regularizers.serialize(self.bias_regularizer),
    }
    # Calls Layer.get_config directly (not training.Model's), so only the
    # base layer config is serialized. NOTE(review): presumably intentional,
    # since this model's structure is fully determined by the args above.
    base_config = base_layer.Layer.get_config(self)
    return dict(list(base_config.items()) + list(config.items()))
  @classmethod
  def from_config(cls, config, custom_objects=None):
    # custom_objects is unused: this model contains no user-defined layers.
    del custom_objects
    return cls(**config)
|
import React, { Fragment, useEffect, useReducer, useContext } from 'react';
import PropTypes from 'prop-types';
import { useDispatch, useSelector, shallowEqual } from 'react-redux';
import { Button } from '@patternfly/react-core';
import { sortable, wrappable, cellWidth, breakWord } from '@patternfly/react-table';
import { useIntl } from 'react-intl';
import { CubesIcon, SearchIcon } from '@patternfly/react-icons';
import isEmpty from 'lodash/isEmpty';
import { fetchRequests,
sortRequests,
setFilterValueRequests,
clearFilterValueRequests,
resetRequestList } from '../../redux/actions/request-actions';
import { createRows } from './request-table-helpers';
import { TableToolbarView } from '../../presentational-components/shared/table-toolbar-view';
import { APPROVAL_APPROVER_PERSONA, useIsApprovalAdmin, useIsApprovalApprover } from '../../helpers/shared/helpers';
import { TopToolbar, TopToolbarTitle } from '../../presentational-components/shared/top-toolbar';
import { AppTabs } from '../../smart-components/app-tabs/app-tabs';
import asyncDebounce from '../../utilities/async-debounce';
import { scrollToTop } from '../../helpers/shared/helpers';
import TableEmptyState from '../../presentational-components/shared/table-empty-state';
import UserContext from '../../user-context';
import { prepareChips } from './chips-helpers';
import routes from '../../constants/routes';
import tableToolbarMessages from '../../messages/table-toolbar.messages';
import requestsMessages from '../../messages/requests.messages';
import commonMessages from '../../messages/common.message';
import { Route } from 'react-router-dom';
import routesLinks from '../../constants/routes';
import ActionModal from './action-modal';
// Build the localized column definitions for the requests table.
// Receives `intl` so every title is formatted at render time.
const columns = (intl) => {
  const idColumn = {
    title: intl.formatMessage(requestsMessages.requestsIdColumn),
    transforms: [ sortable, cellWidth(10) ]
  };
  const nameColumn = {
    title: intl.formatMessage(tableToolbarMessages.name),
    transforms: [ sortable, wrappable, cellWidth(25) ],
    cellTransforms: [ breakWord ]
  };
  const requesterColumn = {
    title: intl.formatMessage(requestsMessages.requesterColumn),
    transforms: [ sortable, wrappable, cellWidth(25) ]
  };
  const updatedColumn = {
    title: intl.formatMessage(requestsMessages.updatedColumn),
    transforms: [ cellWidth(15) ]
  };
  const statusColumn = {
    title: intl.formatMessage(requestsMessages.statusColumn),
    transforms: [ sortable, cellWidth(25) ]
  };

  return [ idColumn, nameColumn, requesterColumn, updatedColumn, statusColumn ];
};
// Debounced (1s) refetch used while the user types into a text filter.
// `filteringCallback` toggles the filtering spinner around the fetch;
// `updateFilter`, when provided (it may be `false`), pushes the pending
// filter value into redux just before fetching.
const debouncedFilter = asyncDebounce(
  (dispatch, filteringCallback, persona, updateFilter) => {
    filteringCallback(true);
    if (updateFilter) {
      updateFilter();
    }

    return dispatch(fetchRequests(persona)).then(() => filteringCallback(false));
  },
  1000
);
// Seed state for the requestsListState reducer. The two text-filter values
// can be pre-populated (from redux) so filters survive remounts.
function initialState(nameValue = '', requesterValue = '') {
  return {
    nameValue,
    requesterValue,
    isOpen: false,
    isFetching: true,
    isFiltering: false,
    rows: []
  };
}
// Reducer for the list's local UI state. Unknown action types return the
// incoming state unchanged (same reference).
const requestsListState = (state, action) => {
  const handlers = {
    setFetching: () => ({ ...state, isFetching: action.payload }),
    setNameValue: () => ({ ...state, nameValue: action.payload }),
    setRequesterValue: () => ({ ...state, requesterValue: action.payload }),
    clearFilters: () => ({ ...state, requesterValue: '', nameValue: '', isFetching: true }),
    setFilteringFlag: () => ({ ...state, isFiltering: action.payload }),
    setRows: () => ({ ...state, rows: action.payload })
  };

  // Own-property check so inherited names (e.g. 'toString') fall through
  // to the default branch, exactly like the original switch.
  return Object.prototype.hasOwnProperty.call(handlers, action.type)
    ? handlers[action.type]()
    : state;
};
// Smart component listing approval requests for the given persona.
// Wires the redux request store into a paginated, sortable, filterable
// table, and mounts the Comment/Approve/Deny action modals as sub-routes.
const RequestsList = ({ persona, indexpath, actionResolver }) => {
  // Server-backed table state from redux: row data, pagination meta,
  // current sort, and the persisted filter values.
  const { requests: { data, meta }, sortBy, filterValue } = useSelector(
    ({ requestReducer: { requests, sortBy, filterValue }}) => ({ requests, sortBy, filterValue }),
    shallowEqual
  );
  // Local UI state (filter inputs, loading flags, rendered rows), seeded
  // from the redux filters so filters survive remounts.
  const [{ nameValue, isFetching, isFiltering, requesterValue, rows }, stateDispatch ] = useReducer(
    requestsListState,
    initialState(filterValue.name, filterValue.requester)
  );
  const { userRoles: userRoles } = useContext(UserContext);
  const dispatch = useDispatch();
  const intl = useIntl();
  const isApprovalAdmin = useIsApprovalAdmin(userRoles);
  const isApprovalApprover = useIsApprovalApprover(userRoles);
  // Empty-state copy depends on which tab (all requests vs. own requests)
  // this list renders for.
  const noRequestsMessage = () => (indexpath === routesLinks.allrequest) ?
    intl.formatMessage(requestsMessages.emptyAllRequestsDescription) : intl.formatMessage(requestsMessages.emptyRequestsDescription);
  // Fetch a page of requests. Skipped (spinner cleared) when the user lacks
  // the approver role required for the approver persona.
  const updateRequests = (pagination) => {
    if (!isApprovalApprover && persona === APPROVAL_APPROVER_PERSONA) {
      stateDispatch({ type: 'setFetching', payload: false });
      return;
    }

    stateDispatch({ type: 'setFetching', payload: true });
    return dispatch(fetchRequests(persona, pagination))
      .then(() => stateDispatch({ type: 'setFetching', payload: false }))
      .catch(() => stateDispatch({ type: 'setFetching', payload: false }));
  };
  // NOTE(review): this local `routes` shadows the `routes` module import
  // used by RequestsList.defaultProps below — consider renaming one of them.
  const routes = () => <Fragment>
    <Route exact path={ routesLinks.requests.comment } render={ props => <ActionModal { ...props }
      actionType={ 'Comment' }
      postMethod={ () => updateRequests(meta) }
    /> }/>
    <Route exact path={ routesLinks.requests.approve } render={ props => <ActionModal { ...props } actionType={ 'Approve' }
      postMethod={ () => updateRequests(meta) }
    /> } />
    <Route exact path={ routesLinks.requests.deny } render={ props => <ActionModal { ...props } actionType={ 'Deny' }
      postMethod={ () => updateRequests(meta) }
    /> } />
  </Fragment>;
  const resetList = () => {
    stateDispatch({ type: 'clearFilters' });
    dispatch(clearFilterValueRequests());
    dispatch(resetRequestList());
  };
  // Reload from scratch whenever the persona changes.
  useEffect(() => {
    resetList();
    updateRequests();
    scrollToTop();
  }, [ persona ]);
  // Rebuild the table rows whenever fresh data arrives.
  useEffect(() => {
    stateDispatch({ type: 'setRows', payload: createRows(actionResolver, data, indexpath, intl) });
  }, [ data ]);
  // Text filters (name/requester) are debounced; other filter types
  // (e.g. the decision checkboxes) update redux immediately.
  const handleFilterChange = (value, type) => {
    const updateFilter = () => dispatch(setFilterValueRequests(value, type));
    let debouncedValue = false;
    if (type === 'name') {
      stateDispatch({ type: 'setNameValue', payload: value });
      debouncedValue = true;
    } else if (type === 'requester') {
      stateDispatch({ type: 'setRequesterValue', payload: value });
      debouncedValue = true;
    }

    if (!debouncedValue) {
      dispatch(setFilterValueRequests(value, type));
    }

    return debouncedFilter(
      dispatch,
      (isFiltering) =>
        stateDispatch({ type: 'setFilteringFlag', payload: isFiltering }),
      persona,
      debouncedValue && updateFilter
    );
  };
  const onSort = (_e, index, direction, { property }) => {
    stateDispatch({ type: 'setFetching', payload: true });
    dispatch(sortRequests({ index, direction, property }));
    return updateRequests();
  };
  const clearFilters = () => {
    stateDispatch({ type: 'clearFilters' });
    dispatch(clearFilterValueRequests());
    return updateRequests();
  };
  // Deleting a chip clears a text filter entirely, or removes one value
  // from a multi-select filter (e.g. decision).
  const onDeleteChip = ([{ key, chips: [{ value }] }]) => {
    const newValue = [ 'name', 'requester' ].includes(key) ? '' : filterValue[key].filter(val => value !== val);
    handleFilterChange(newValue, key);
  };
  return (
    <Fragment>
      <TopToolbar>
        <TopToolbarTitle title={ intl.formatMessage(commonMessages.approvalTitle) }/>
        { isApprovalAdmin && <AppTabs/> }
      </TopToolbar>
      <TableToolbarView
        ouiaId={ 'requests-table' }
        sortBy={ sortBy }
        onSort={ onSort }
        rows={ rows }
        columns={ columns(intl) }
        fetchData={ updateRequests }
        routes={ routes }
        titlePlural={ intl.formatMessage(requestsMessages.requests) }
        titleSingular={ intl.formatMessage(requestsMessages.request) }
        pagination={ meta }
        handlePagination={ updateRequests }
        filterValue={ nameValue }
        onFilterChange={ (value) => handleFilterChange(value, 'name') }
        isLoading={ isFetching || isFiltering }
        renderEmptyState={ () => (
          <TableEmptyState
            title={ isEmpty(filterValue)
              ? intl.formatMessage(requestsMessages.emptyRequestsTitle)
              : intl.formatMessage(tableToolbarMessages.noResultsFound)
            }
            icon={ isEmpty(filterValue) ? CubesIcon : SearchIcon }
            PrimaryAction={ () =>
              isEmpty(filterValue) ? noRequestsMessage() : (
                <Button onClick={ clearFilters } variant="link" ouiaId={ `clear-filter-requests` }>
                  { intl.formatMessage(tableToolbarMessages.clearAllFilters) }
                </Button>
              )
            }
            description={
              isEmpty(filterValue)
                ? ''
                : intl.formatMessage(tableToolbarMessages.clearAllFiltersDescription)
            }
            isSearch={ !isEmpty(filterValue) }
          />
        ) }
        activeFiltersConfig={ {
          filters: prepareChips({ name: nameValue, requester: requesterValue, decision: filterValue.decision }, intl),
          onDelete: (_e, chip, deleteAll) => deleteAll ? clearFilters() : onDeleteChip(chip)
        } }
        filterConfig={ [
          {
            label: intl.formatMessage(requestsMessages.requesterColumn),
            filterValues: {
              placeholder: intl.formatMessage(
                tableToolbarMessages.filterByTitle,
                { title: intl.formatMessage(requestsMessages.requesterColumn).toLowerCase() }
              ),
              'aria-label': intl.formatMessage(
                tableToolbarMessages.filterByTitle,
                { title: intl.formatMessage(requestsMessages.requesterColumn).toLowerCase() }
              ),
              onChange: (_event, value) => handleFilterChange(value, 'requester'),
              value: requesterValue
            }
          }, {
            label: intl.formatMessage(requestsMessages.statusColumn),
            type: 'checkbox',
            filterValues: {
              placeholder: intl.formatMessage(
                tableToolbarMessages.filterByTitle,
                { title: intl.formatMessage(requestsMessages.statusColumn).toLowerCase() }
              ),
              'aria-label': intl.formatMessage(
                tableToolbarMessages.filterByTitle,
                { title: intl.formatMessage(requestsMessages.statusColumn).toLowerCase() }
              ),
              onChange: (_event, value) => handleFilterChange(value, 'decision'),
              value: filterValue.decision,
              items: [ 'approved', 'canceled', 'denied', 'error', 'undecided' ].map((state) => ({
                label: intl.formatMessage(requestsMessages[state]),
                value: state
              }))
            }
          }
        ] }
      />
    </Fragment>);
};
// Runtime prop validation. Note the `routes` prop is declared but not
// destructured by the component above.
RequestsList.propTypes = {
  routes: PropTypes.func,
  persona: PropTypes.string,
  indexpath: PropTypes.shape ({ index: PropTypes.string }),
  actionResolver: PropTypes.func
};
// `routes` here is the constants module imported at the top of the file.
RequestsList.defaultProps = {
  indexpath: routes.request,
  actionResolver: () => false
};
export default RequestsList;
|
/*
Copyright 2003, 2004, 2005, 2006 PathScale, Inc. All Rights Reserved.
File modified October 3, 2003 by PathScale, Inc. to update Open64 C/C++
front-ends to GNU 3.3.1 release.
*/
/* Output Dwarf2 format symbol table information from the GNU C compiler.
Copyright (C) 1992, 1993, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
Free Software Foundation, Inc.
Contributed by Gary Funck (gary@intrepid.com).
Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
Extensively modified by Jason Merrill (jason@cygnus.com).
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
/* TODO: Emit .debug_line header even when there are no functions, since
the file numbers are used by .debug_info. Alternately, leave
out locations for types and decls.
Avoid talking about ctors and op= for PODs.
Factor out common prologue sequences into multiple CIEs. */
/* The first part of this file deals with the DWARF 2 frame unwind
information, which is also used by the GCC efficient exception handling
mechanism. The second part, controlled only by an #ifdef
DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
information. */
#include "config.h"
#include "system.h"
#ifdef SGI_MONGOOSE
// To get typdef tree
#include "rtl.h"
#endif /* SGI_MONGOOSE */
#include "tree.h"
#include "flags.h"
#include "real.h"
#ifndef SGI_MONGOOSE
#include "rtl.h"
#endif /* SGI_MONGOOSE */
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "reload.h"
#include "function.h"
#include "output.h"
#include "expr.h"
#include "libfuncs.h"
#include "except.h"
#include "dwarf2.h"
#include "dwarf2out.h"
#include "dwarf2asm.h"
#include "toplev.h"
#include "varray.h"
#include "ggc.h"
#include "md5.h"
#include "tm_p.h"
#include "diagnostic.h"
#include "debug.h"
#include "target.h"
#include "langhooks.h"
#include "hashtable.h"
#include "hashtab.h"
#ifdef SGI_MONGOOSE
// To define DWARF2_UNWIND_INFO
#include "defaults.h"
#include "wfe_dst.h"
#endif /* SGI_MONGOOSE */
#ifdef DWARF2_DEBUGGING_INFO
static void dwarf2out_source_line PARAMS ((unsigned int, const char *));
#endif
/* DWARF2 Abbreviation Glossary:
CFA = Canonical Frame Address
a fixed address on the stack which identifies a call frame.
We define it to be the value of SP just before the call insn.
The CFA register and offset, which may change during the course
of the function, are used to calculate its value at runtime.
CFI = Call Frame Instruction
an instruction for the DWARF2 abstract machine
CIE = Common Information Entry
information describing information common to one or more FDEs
DIE = Debugging Information Entry
FDE = Frame Description Entry
information describing the stack call frame, in particular,
how to restore registers
DW_CFA_... = DWARF2 CFA call frame instruction
DW_TAG_... = DWARF2 DIE tag */
/* Decide whether we want to emit frame unwind information for the current
translation unit. */
int
dwarf2out_do_frame ()
{
  /* Frame info is wanted when DWARF2 debugging is selected, when the
     target unconditionally requests it (DWARF2_FRAME_INFO), or when the
     unwinder needs it for unwind tables or table-driven exception
     handling (not needed for setjmp/longjmp-based exceptions).  */
  return (write_symbols == DWARF2_DEBUG
	  || write_symbols == VMS_AND_DWARF2_DEBUG
#ifdef DWARF2_FRAME_INFO
	  || DWARF2_FRAME_INFO
#endif
#ifdef DWARF2_UNWIND_INFO
	  || flag_unwind_tables
	  || (flag_exceptions && ! USING_SJLJ_EXCEPTIONS)
#endif
	  );
}
/* The size of the target's pointer type. */
#ifndef PTR_SIZE
#define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
#endif
/* Default version of targetm.eh_frame_section. Note this must appear
outside the DWARF2_DEBUGGING_INFO || DWARF2_UNWIND_INFO macro
guards. */
void
default_eh_frame_section ()
{
#ifdef EH_FRAME_SECTION_NAME
#ifdef HAVE_LD_RO_RW_SECTION_MIXING
  int fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
  int per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
  int lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
  int flags;

  /* The section can be read-only only when not generating PIC, or when
     none of the FDE/personality/LSDA encodings require absolute or
     aligned pointers (which the dynamic linker would have to relocate,
     forcing the section writable).  */
  flags = (! flag_pic
	   || ((fde_encoding & 0x70) != DW_EH_PE_absptr
	       && (fde_encoding & 0x70) != DW_EH_PE_aligned
	       && (per_encoding & 0x70) != DW_EH_PE_absptr
	       && (per_encoding & 0x70) != DW_EH_PE_aligned
	       && (lsda_encoding & 0x70) != DW_EH_PE_absptr
	       && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
	  ? 0 : SECTION_WRITE;
  named_section_flags (EH_FRAME_SECTION_NAME, flags);
#else
  /* Linker cannot mix RO and RW sections of the same name: always
     emit the EH frame section writable.  */
  named_section_flags (EH_FRAME_SECTION_NAME, SECTION_WRITE);
#endif
#else
  /* No dedicated EH section name: emit the frame data into .data under a
     per-file global label.  */
  tree label = get_file_function_name ('F');

  data_section ();
  ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
  (*targetm.asm_out.globalize_label) (asm_out_file, IDENTIFIER_POINTER (label));
  ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
#endif
}
/* Array of RTXes referenced by the debugging information, which therefore
must be kept around forever. */
static GTY(()) varray_type used_rtx_varray;
/* A pointer to the base of a list of incomplete types which might be
completed at some later time. incomplete_types_list needs to be a VARRAY
because we want to tell the garbage collector about it. */
static GTY(()) varray_type incomplete_types;
/* A pointer to the base of a table of references to declaration
scopes. This table is a display which tracks the nesting
of declaration scopes at the current scope and containing
scopes. This table is used to find the proper place to
define type declaration DIE's. */
static GTY(()) varray_type decl_scope_table;
#if defined (DWARF2_DEBUGGING_INFO) || defined (DWARF2_UNWIND_INFO)
/* How to start an assembler comment. */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif
/* Reference types for CFI instruction nodes, FDE records, and CFI
   instruction operands.  */
typedef struct dw_cfi_struct *dw_cfi_ref;
typedef struct dw_fde_struct *dw_fde_ref;
typedef union dw_cfi_oprnd_struct *dw_cfi_oprnd_ref;
/* Call frames are described using a sequence of Call Frame
   Information instructions.  The register number, offset
   and address fields are provided as possible operands;
   their use is selected by the opcode field.  */
typedef union dw_cfi_oprnd_struct
{
  unsigned long dw_cfi_reg_num;		/* A register/column number.  */
  long int dw_cfi_offset;		/* An offset value.  */
  const char *dw_cfi_addr;		/* An assembler label name.  */
  struct dw_loc_descr_struct *dw_cfi_loc;  /* A DWARF location expression.  */
}
dw_cfi_oprnd;
/* One call frame instruction: a DW_CFA_* opcode plus up to two operands,
   chained into a singly linked list.  */
typedef struct dw_cfi_struct
{
  dw_cfi_ref dw_cfi_next;		/* Next instruction in the list.  */
  enum dwarf_call_frame_info dw_cfi_opc;
  dw_cfi_oprnd dw_cfi_oprnd1;
  dw_cfi_oprnd dw_cfi_oprnd2;
}
dw_cfi_node;
/* This is how we define the location of the CFA. We use to handle it
as REG + OFFSET all the time, but now it can be more complex.
It can now be either REG + CFA_OFFSET or *(REG + BASE_OFFSET) + CFA_OFFSET.
Instead of passing around REG and OFFSET, we pass a copy
of this structure. */
typedef struct cfa_loc
{
  unsigned long reg;	/* Base register (gcc regno or DWARF column,
			   depending on context; see def_cfa_1).  */
  long offset;		/* CFA_OFFSET: added after any dereference.  */
  long base_offset;	/* BASE_OFFSET: added inside the dereference.  */
  int indirect;            /* 1 if CFA is accessed via a dereference.  */
} dw_cfa_location;
/* All call frame descriptions (FDE's) in the GCC generated DWARF
refer to a single Common Information Entry (CIE), defined at
the beginning of the .debug_frame section. This use of a single
CIE obviates the need to keep track of multiple CIE's
in the DWARF generation routines below. */
typedef struct dw_fde_struct
{
  const char *dw_fde_begin;		/* Presumably the label bounding the
					   function's start — verify against
					   the FDE emitters later in file.  */
  const char *dw_fde_current_label;	/* Last label CFIs were added at
					   (see add_fde_cfi).  */
  const char *dw_fde_end;		/* Closing counterpart of _begin.  */
  dw_cfi_ref dw_fde_cfi;		/* This FDE's CFI instruction list.  */
  unsigned funcdef_number;
  unsigned all_throwers_are_sibcalls : 1;
  unsigned nothrow : 1;
  unsigned uses_eh_lsda : 1;
}
dw_fde_node;
/* Maximum size (in bytes) of an artificially generated label. */
#define MAX_ARTIFICIAL_LABEL_BYTES 30
/* The size of addresses as they appear in the Dwarf 2 data.
Some architectures use word addresses to refer to code locations,
but Dwarf 2 info always uses byte addresses. On such machines,
Dwarf 2 addresses need to be larger than the architecture's
pointers. */
#ifndef DWARF2_ADDR_SIZE
#define DWARF2_ADDR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
#endif
/* The size in bytes of a DWARF field indicating an offset or length
relative to a debug info section, specified to be 4 bytes in the
DWARF-2 specification. The SGI/MIPS ABI defines it to be the same
as PTR_SIZE. */
#ifndef DWARF_OFFSET_SIZE
#define DWARF_OFFSET_SIZE 4
#endif
#define DWARF_VERSION 2
/* Round SIZE up to the nearest BOUNDARY. */
#define DWARF_ROUND(SIZE,BOUNDARY) \
((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
/* Offsets recorded in opcodes are a multiple of this alignment factor. */
#ifndef DWARF_CIE_DATA_ALIGNMENT
#ifdef STACK_GROWS_DOWNWARD
#define DWARF_CIE_DATA_ALIGNMENT (-((int) UNITS_PER_WORD))
#else
#define DWARF_CIE_DATA_ALIGNMENT ((int) UNITS_PER_WORD)
#endif
#endif
/* A pointer to the base of a table that contains frame description
information for each routine. */
static dw_fde_ref fde_table;
/* Number of elements currently allocated for fde_table. */
static unsigned fde_table_allocated;
/* Number of elements in fde_table currently in use. */
static unsigned fde_table_in_use;
/* Size (in elements) of increments by which we may expand the
fde_table. */
#define FDE_TABLE_INCREMENT 256
/* A list of call frame insns for the CIE. */
static dw_cfi_ref cie_cfi_head;
/* Some DWARF extensions (e.g., MIPS/SGI) implement a subprogram
attribute that accelerates the lookup of the FDE associated
with the subprogram. This variable holds the table index of the FDE
associated with the current function (body) definition. */
static unsigned current_funcdef_fde;
struct ht *debug_str_hash;
/* Entry in debug_str_hash for an indirect (.debug_str) string.
   NOTE(review): the fields are consumed later in the file; meanings
   below are inferred from the names — verify against the emitters.  */
struct indirect_string_node
{
  struct ht_identifier id;	/* Hash-table key/linkage.  */
  unsigned int refcount;
  unsigned int form;		/* DW_FORM_* used to reference it.  */
  char *label;			/* Assembler label for the string.  */
};
/* Forward declarations for functions defined in this file. */
static char *stripattributes PARAMS ((const char *));
static const char *dwarf_cfi_name PARAMS ((unsigned));
static dw_cfi_ref new_cfi PARAMS ((void));
static void add_cfi PARAMS ((dw_cfi_ref *, dw_cfi_ref));
static void add_fde_cfi PARAMS ((const char *, dw_cfi_ref));
static void lookup_cfa_1 PARAMS ((dw_cfi_ref,
dw_cfa_location *));
static void lookup_cfa PARAMS ((dw_cfa_location *));
static void reg_save PARAMS ((const char *, unsigned,
unsigned, long));
static void initial_return_save PARAMS ((rtx));
static long stack_adjust_offset PARAMS ((rtx));
static void output_cfi PARAMS ((dw_cfi_ref, dw_fde_ref, int));
static void output_call_frame_info PARAMS ((int));
static void dwarf2out_stack_adjust PARAMS ((rtx));
static void queue_reg_save PARAMS ((const char *, rtx, long));
static void flush_queued_reg_saves PARAMS ((void));
static bool clobbers_queued_reg_save PARAMS ((rtx));
static void dwarf2out_frame_debug_expr PARAMS ((rtx, const char *));
/* Support for complex CFA locations. */
static void output_cfa_loc PARAMS ((dw_cfi_ref));
static void get_cfa_from_loc_descr PARAMS ((dw_cfa_location *,
struct dw_loc_descr_struct *));
static struct dw_loc_descr_struct *build_cfa_loc
PARAMS ((dw_cfa_location *));
static void def_cfa_1 PARAMS ((const char *,
dw_cfa_location *));
/* How to start an assembler comment. */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif
/* Data and reference forms for relocatable data. */
#define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
#define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
#ifndef DEBUG_FRAME_SECTION
#define DEBUG_FRAME_SECTION ".debug_frame"
#endif
#ifndef FUNC_BEGIN_LABEL
#define FUNC_BEGIN_LABEL "LFB"
#endif
#ifndef FUNC_END_LABEL
#define FUNC_END_LABEL "LFE"
#endif
#define FRAME_BEGIN_LABEL "Lframe"
#define CIE_AFTER_SIZE_LABEL "LSCIE"
#define CIE_END_LABEL "LECIE"
#define FDE_LABEL "LSFDE"
#define FDE_AFTER_SIZE_LABEL "LASFDE"
#define FDE_END_LABEL "LEFDE"
#define LINE_NUMBER_BEGIN_LABEL "LSLT"
#define LINE_NUMBER_END_LABEL "LELT"
#define LN_PROLOG_AS_LABEL "LASLTP"
#define LN_PROLOG_END_LABEL "LELTP"
#define DIE_LABEL_PREFIX "DW"
/* The DWARF 2 CFA column which tracks the return address. Normally this
is the column for PC, or the first column after all of the hard
registers. */
#ifndef DWARF_FRAME_RETURN_COLUMN
#ifdef PC_REGNUM
#define DWARF_FRAME_RETURN_COLUMN DWARF_FRAME_REGNUM (PC_REGNUM)
#else
#define DWARF_FRAME_RETURN_COLUMN DWARF_FRAME_REGISTERS
#endif
#endif
/* The mapping from gcc register number to DWARF 2 CFA column number. By
default, we just provide columns for all registers. */
#ifndef DWARF_FRAME_REGNUM
#define DWARF_FRAME_REGNUM(REG) DBX_REGISTER_NUMBER (REG)
#endif
/* The offset from the incoming value of %sp to the top of the stack frame
for the current function. */
#ifndef INCOMING_FRAME_SP_OFFSET
#define INCOMING_FRAME_SP_OFFSET 0
#endif
/* Hook used by __throw. */
rtx
expand_builtin_dwarf_sp_column ()
{
  /* The DWARF column number of the stack pointer, as a constant RTX.  */
  return GEN_INT (DWARF_FRAME_REGNUM (STACK_POINTER_REGNUM));
}
/* Return a pointer to a copy of the section string name S with all
attributes stripped off, and an asterisk prepended (for assemble_name). */
/* Return a freshly allocated copy of section name S truncated at the
   first ',' (dropping any attribute suffix) and with a '*' prepended,
   making it suitable for assemble_name.  The caller owns the result.  */
static inline char *
stripattributes (s)
     const char *s;
{
  size_t body_len = strcspn (s, ",");
  char *stripped = xmalloc (body_len + 2);

  stripped[0] = '*';
  memcpy (stripped + 1, s, body_len);
  stripped[body_len + 1] = '\0';
  return stripped;
}
/* Generate code to initialize the register size table. */
void
expand_builtin_init_dwarf_reg_sizes (address)
     tree address;
{
  int i;
  enum machine_mode mode = TYPE_MODE (char_type_node);
  rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
  rtx mem = gen_rtx_MEM (BLKmode, addr);

  /* For every hard register with a DWARF column, store the register's
     byte size at table[column].  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (DWARF_FRAME_REGNUM (i) < DWARF_FRAME_REGISTERS)
      {
	HOST_WIDE_INT offset = DWARF_FRAME_REGNUM (i) * GET_MODE_SIZE (mode);
	HOST_WIDE_INT size = GET_MODE_SIZE (reg_raw_mode[i]);

	/* Negative offset means a negative DWARF column: no table slot.  */
	if (offset < 0)
	  continue;

	emit_move_insn (adjust_address (mem, mode, offset), GEN_INT (size));
      }
}
/* Convert a DWARF call frame info. operation to its string name */
static const char *
dwarf_cfi_name (cfi_opc)
unsigned cfi_opc;
{
switch (cfi_opc)
{
case DW_CFA_advance_loc:
return "DW_CFA_advance_loc";
case DW_CFA_offset:
return "DW_CFA_offset";
case DW_CFA_restore:
return "DW_CFA_restore";
case DW_CFA_nop:
return "DW_CFA_nop";
case DW_CFA_set_loc:
return "DW_CFA_set_loc";
case DW_CFA_advance_loc1:
return "DW_CFA_advance_loc1";
case DW_CFA_advance_loc2:
return "DW_CFA_advance_loc2";
case DW_CFA_advance_loc4:
return "DW_CFA_advance_loc4";
case DW_CFA_offset_extended:
return "DW_CFA_offset_extended";
case DW_CFA_restore_extended:
return "DW_CFA_restore_extended";
case DW_CFA_undefined:
return "DW_CFA_undefined";
case DW_CFA_same_value:
return "DW_CFA_same_value";
case DW_CFA_register:
return "DW_CFA_register";
case DW_CFA_remember_state:
return "DW_CFA_remember_state";
case DW_CFA_restore_state:
return "DW_CFA_restore_state";
case DW_CFA_def_cfa:
return "DW_CFA_def_cfa";
case DW_CFA_def_cfa_register:
return "DW_CFA_def_cfa_register";
case DW_CFA_def_cfa_offset:
return "DW_CFA_def_cfa_offset";
/* DWARF 3 */
case DW_CFA_def_cfa_expression:
return "DW_CFA_def_cfa_expression";
case DW_CFA_expression:
return "DW_CFA_expression";
case DW_CFA_offset_extended_sf:
return "DW_CFA_offset_extended_sf";
case DW_CFA_def_cfa_sf:
return "DW_CFA_def_cfa_sf";
case DW_CFA_def_cfa_offset_sf:
return "DW_CFA_def_cfa_offset_sf";
/* SGI/MIPS specific */
case DW_CFA_MIPS_advance_loc8:
return "DW_CFA_MIPS_advance_loc8";
/* GNU extensions */
case DW_CFA_GNU_window_save:
return "DW_CFA_GNU_window_save";
case DW_CFA_GNU_args_size:
return "DW_CFA_GNU_args_size";
case DW_CFA_GNU_negative_offset_extended:
return "DW_CFA_GNU_negative_offset_extended";
default:
return "DW_CFA_<unknown>";
}
}
/* Return a pointer to a newly allocated Call Frame Instruction. */
/* Allocate and return a new Call Frame Instruction node with its link
   and operands cleared.  The opcode is deliberately left for the caller
   to fill in.  */
static inline dw_cfi_ref
new_cfi ()
{
  dw_cfi_ref node = (dw_cfi_ref) xmalloc (sizeof (dw_cfi_node));

  node->dw_cfi_oprnd1.dw_cfi_reg_num = 0;
  node->dw_cfi_oprnd2.dw_cfi_reg_num = 0;
  node->dw_cfi_next = NULL;
  return node;
}
/* Add a Call Frame Instruction to list of instructions. */
/* Append CFI to the singly linked instruction list rooted at *LIST_HEAD.  */
static inline void
add_cfi (list_head, cfi)
     dw_cfi_ref *list_head;
     dw_cfi_ref cfi;
{
  dw_cfi_ref *tail = list_head;

  /* Walk to the terminating NULL link, then splice CFI in there.  */
  while (*tail != NULL)
    tail = &(*tail)->dw_cfi_next;
  *tail = cfi;
}
/* Generate a new label for the CFI info to refer to. */
char *
dwarf2out_cfi_label ()
{
  /* The buffer is static: the returned pointer is only valid until the
     next call, so callers must xstrdup it to keep it (add_fde_cfi does).  */
  static char label[20];
  static unsigned long label_num = 0;

  ASM_GENERATE_INTERNAL_LABEL (label, "LCFI", label_num++);
  /* Emit the label into the assembly output as a side effect.  */
  ASM_OUTPUT_LABEL (asm_out_file, label);
  return label;
}
/* Add CFI to the current fde at the PC value indicated by LABEL if specified,
or to the CIE if LABEL is NULL. */
/* Add CFI to the current FDE at the PC value indicated by LABEL if
   specified (an empty LABEL means "emit a fresh label here first"), or
   to the CIE if LABEL is NULL.  */
static void
add_fde_cfi (label, cfi)
     const char *label;
     dw_cfi_ref cfi;
{
  dw_fde_ref fde;

  if (label == NULL)
    {
      /* No label: the instruction belongs in the common CIE.  */
      add_cfi (&cie_cfi_head, cfi);
      return;
    }

  fde = &fde_table[fde_table_in_use - 1];

  if (*label == 0)
    label = dwarf2out_cfi_label ();

  if (fde->dw_fde_current_label == NULL
      || strcmp (label, fde->dw_fde_current_label) != 0)
    {
      dw_cfi_ref advance;

      fde->dw_fde_current_label = label = xstrdup (label);

      /* Set the location counter to the new label first, so the
	 instruction being added applies at that address.  */
      advance = new_cfi ();
      advance->dw_cfi_opc = DW_CFA_advance_loc4;
      advance->dw_cfi_oprnd1.dw_cfi_addr = label;
      add_cfi (&fde->dw_fde_cfi, advance);
    }

  add_cfi (&fde->dw_fde_cfi, cfi);
}
/* Subroutine of lookup_cfa. */
static inline void
lookup_cfa_1 (cfi, loc)
dw_cfi_ref cfi;
dw_cfa_location *loc;
{
switch (cfi->dw_cfi_opc)
{
case DW_CFA_def_cfa_offset:
loc->offset = cfi->dw_cfi_oprnd1.dw_cfi_offset;
break;
case DW_CFA_def_cfa_register:
loc->reg = cfi->dw_cfi_oprnd1.dw_cfi_reg_num;
break;
case DW_CFA_def_cfa:
loc->reg = cfi->dw_cfi_oprnd1.dw_cfi_reg_num;
loc->offset = cfi->dw_cfi_oprnd2.dw_cfi_offset;
break;
case DW_CFA_def_cfa_expression:
get_cfa_from_loc_descr (loc, cfi->dw_cfi_oprnd1.dw_cfi_loc);
break;
default:
break;
}
}
/* Find the previous value for the CFA. */
static void
lookup_cfa (loc)
dw_cfa_location *loc;
{
dw_cfi_ref cfi;
loc->reg = (unsigned long) -1;
loc->offset = 0;
loc->indirect = 0;
loc->base_offset = 0;
for (cfi = cie_cfi_head; cfi; cfi = cfi->dw_cfi_next)
lookup_cfa_1 (cfi, loc);
if (fde_table_in_use)
{
dw_fde_ref fde = &fde_table[fde_table_in_use - 1];
for (cfi = fde->dw_fde_cfi; cfi; cfi = cfi->dw_cfi_next)
lookup_cfa_1 (cfi, loc);
}
}
/* The current rule for calculating the DWARF2 canonical frame address. */
static dw_cfa_location cfa;
/* The register used for saving registers to the stack, and its offset
from the CFA. */
static dw_cfa_location cfa_store;
/* The running total of the size of arguments pushed onto the stack. */
static long args_size;
/* The last args_size we actually output. */
static long old_args_size;
/* Entry point to update the canonical frame address (CFA).
LABEL is passed to add_fde_cfi. The value of CFA is now to be
calculated from REG+OFFSET. */
/* Entry point to update the canonical frame address (CFA).  LABEL is
   passed to add_fde_cfi.  The CFA is now computed as REG + OFFSET.  */
void
dwarf2out_def_cfa (label, reg, offset)
     const char *label;
     unsigned reg;
     long offset;
{
  dw_cfa_location loc;

  /* Build a simple (non-indirect) register + offset location and hand
     it to the common worker.  */
  loc.reg = reg;
  loc.offset = offset;
  loc.indirect = 0;
  loc.base_offset = 0;
  def_cfa_1 (label, &loc);
}
/* This routine does the actual work. The CFA is now calculated from
the dw_cfa_location structure. */
static void
def_cfa_1 (label, loc_p)
     const char *label;
     dw_cfa_location *loc_p;
{
  dw_cfi_ref cfi;
  dw_cfa_location old_cfa, loc;

  /* Record the new rule globally, and keep cfa_store in sync when the
     CFA register is also the register saves are made relative to.  */
  cfa = *loc_p;
  loc = *loc_p;
  if (cfa_store.reg == loc.reg && loc.indirect == 0)
    cfa_store.offset = loc.offset;

  /* From here on work with the DWARF column number, not the gcc
     register number.  */
  loc.reg = DWARF_FRAME_REGNUM (loc.reg);
  lookup_cfa (&old_cfa);
  /* If nothing changed, no need to issue any call frame instructions.  */
  if (loc.reg == old_cfa.reg && loc.offset == old_cfa.offset
      && loc.indirect == old_cfa.indirect
      && (loc.indirect == 0 || loc.base_offset == old_cfa.base_offset))
    return;
  cfi = new_cfi ();
  if (loc.reg == old_cfa.reg && !loc.indirect)
    {
      /* Construct a "DW_CFA_def_cfa_offset <offset>" instruction,
	 indicating the CFA register did not change but the offset
	 did.  */
      cfi->dw_cfi_opc = DW_CFA_def_cfa_offset;
      cfi->dw_cfi_oprnd1.dw_cfi_offset = loc.offset;
    }
#ifndef MIPS_DEBUGGING_INFO /* SGI dbx thinks this means no offset.  */
  else if (loc.offset == old_cfa.offset && old_cfa.reg != (unsigned long) -1
	   && !loc.indirect)
    {
      /* Construct a "DW_CFA_def_cfa_register <register>" instruction,
	 indicating the CFA register has changed to <register> but the
	 offset has not changed.  */
      cfi->dw_cfi_opc = DW_CFA_def_cfa_register;
      cfi->dw_cfi_oprnd1.dw_cfi_reg_num = loc.reg;
    }
#endif
  else if (loc.indirect == 0)
    {
      /* Construct a "DW_CFA_def_cfa <register> <offset>" instruction,
	 indicating the CFA register has changed to <register> with
	 the specified offset.  */
      cfi->dw_cfi_opc = DW_CFA_def_cfa;
      cfi->dw_cfi_oprnd1.dw_cfi_reg_num = loc.reg;
      cfi->dw_cfi_oprnd2.dw_cfi_offset = loc.offset;
    }
  else
    {
      /* Construct a DW_CFA_def_cfa_expression instruction to
	 calculate the CFA using a full location expression since no
	 register-offset pair is available.  */
      struct dw_loc_descr_struct *loc_list;

      cfi->dw_cfi_opc = DW_CFA_def_cfa_expression;
      loc_list = build_cfa_loc (&loc);
      cfi->dw_cfi_oprnd1.dw_cfi_loc = loc_list;
    }
  add_fde_cfi (label, cfi);
}
/* Add the CFI for saving a register.  REG is the CFA column number.
   LABEL is passed to add_fde_cfi.
   If SREG is -1, the register is saved at OFFSET from the CFA;
   otherwise it is saved in SREG.  */

static void
reg_save (label, reg, sreg, offset)
     const char *label;
     unsigned reg;		/* CFA column of the saved register.  */
     unsigned sreg;		/* Column it was copied to, or -1.  */
     long offset;		/* Byte offset from the CFA, used if SREG == -1.  */
{
  dw_cfi_ref cfi = new_cfi ();

  cfi->dw_cfi_oprnd1.dw_cfi_reg_num = reg;

  /* The following comparison is correct.  -1 is used to indicate that
     the value isn't a register number.  */
  if (sreg == (unsigned int) -1)
    {
      if (reg & ~0x3f)
	/* The register number won't fit in 6 bits, so we have to use
	   the long form.  */
	cfi->dw_cfi_opc = DW_CFA_offset_extended;
      else
	/* Short form: the column number is packed into the opcode byte.  */
	cfi->dw_cfi_opc = DW_CFA_offset;

#ifdef ENABLE_CHECKING
      {
	/* If we get an offset that is not a multiple of
	   DWARF_CIE_DATA_ALIGNMENT, there is either a bug in the
	   definition of DWARF_CIE_DATA_ALIGNMENT, or a bug in the machine
	   description.  */
	long check_offset = offset / DWARF_CIE_DATA_ALIGNMENT;

	if (check_offset * DWARF_CIE_DATA_ALIGNMENT != offset)
	  abort ();
      }
#endif
      /* CFI offsets are expressed as multiples of the CIE data
	 alignment factor.  */
      offset /= DWARF_CIE_DATA_ALIGNMENT;
      if (offset < 0)
	/* A negative factored offset needs the signed-LEB128 form.  */
	cfi->dw_cfi_opc = DW_CFA_offset_extended_sf;

      cfi->dw_cfi_oprnd2.dw_cfi_offset = offset;
    }
  else if (sreg == reg)
    /* We could emit a DW_CFA_same_value in this case, but don't bother.  */
    return;
  else
    {
      /* Saved in another register.  */
      cfi->dw_cfi_opc = DW_CFA_register;
      cfi->dw_cfi_oprnd2.dw_cfi_reg_num = sreg;
    }

  add_fde_cfi (label, cfi);
}
/* Add the CFI for saving a register window.  LABEL is passed to
   add_fde_cfi.

   This CFI tells the unwinder that it needs to restore the window registers
   from the previous frame's window save area.

   ??? Perhaps we should note in the CIE where windows are saved (instead of
   assuming 0(cfa)) and what registers are in the window.  */

void
dwarf2out_window_save (label)
     const char *label;
{
  dw_cfi_ref cfi = new_cfi ();

  /* GNU extension opcode; it takes no operands.  */
  cfi->dw_cfi_opc = DW_CFA_GNU_window_save;
  add_fde_cfi (label, cfi);
}
/* Add a CFI to update the running total of the size of arguments
   pushed onto the stack.  SIZE is the new total; LABEL is passed to
   add_fde_cfi.  Emits nothing when the total is unchanged.  */

void
dwarf2out_args_size (label, size)
     const char *label;
     long size;
{
  dw_cfi_ref sz_cfi;

  /* Nothing to do unless the running total actually changed.  */
  if (old_args_size == size)
    return;

  old_args_size = size;

  sz_cfi = new_cfi ();
  sz_cfi->dw_cfi_opc = DW_CFA_GNU_args_size;
  sz_cfi->dw_cfi_oprnd1.dw_cfi_offset = size;
  add_fde_cfi (label, sz_cfi);
}
/* Entry point for saving a register to the stack.  REG is the GCC register
   number.  LABEL and OFFSET are passed to reg_save.  */

void
dwarf2out_reg_save (label, reg, offset)
     const char *label;
     unsigned reg;
     long offset;
{
  /* Translate the GCC register number to its DWARF frame column;
     SREG == -1 selects the saved-at-offset form.  */
  reg_save (label, DWARF_FRAME_REGNUM (reg), -1, offset);
}
/* Entry point for saving the return address in the stack.
   LABEL and OFFSET are passed to reg_save.  */

void
dwarf2out_return_save (label, offset)
     const char *label;
     long offset;
{
  /* The return address lives in the target's designated RA column.  */
  reg_save (label, DWARF_FRAME_RETURN_COLUMN, -1, offset);
}
/* Entry point for saving the return address in a register.
   LABEL and SREG are passed to reg_save.  */

void
dwarf2out_return_reg (label, sreg)
     const char *label;
     unsigned sreg;
{
  /* OFFSET is unused when SREG names a register, so pass 0.  */
  reg_save (label, DWARF_FRAME_RETURN_COLUMN, sreg, 0);
}
/* Record the initial position of the return address.  RTL is
   INCOMING_RETURN_ADDR_RTX.  Emits a reg_save describing where the
   caller's return address lives on function entry.  */

static void
initial_return_save (rtl)
     rtx rtl;
{
  unsigned int reg = (unsigned int) -1;	/* -1 means RA is not in a register.  */
  HOST_WIDE_INT offset = 0;		/* Stack offset of RA, if in memory.  */

  switch (GET_CODE (rtl))
    {
    case REG:
      /* RA is in a register.  */
      reg = DWARF_FRAME_REGNUM (REGNO (rtl));
      break;

    case MEM:
      /* RA is on the stack.  Only SP-relative addresses are accepted.  */
      rtl = XEXP (rtl, 0);
      switch (GET_CODE (rtl))
	{
	case REG:
	  /* (mem (reg sp)): RA at the stack pointer itself.  */
	  if (REGNO (rtl) != STACK_POINTER_REGNUM)
	    abort ();
	  offset = 0;
	  break;

	case PLUS:
	  /* (mem (plus (reg sp) (const_int))).  */
	  if (REGNO (XEXP (rtl, 0)) != STACK_POINTER_REGNUM)
	    abort ();
	  offset = INTVAL (XEXP (rtl, 1));
	  break;

	case MINUS:
	  /* (mem (minus (reg sp) (const_int))).  */
	  if (REGNO (XEXP (rtl, 0)) != STACK_POINTER_REGNUM)
	    abort ();
	  offset = -INTVAL (XEXP (rtl, 1));
	  break;

	default:
	  abort ();
	}

      break;

    case PLUS:
      /* The return address is at some offset from any value we can
	 actually load.  For instance, on the SPARC it is in %i7+8.  Just
	 ignore the offset for now; it doesn't matter for unwinding frames.  */
      if (GET_CODE (XEXP (rtl, 1)) != CONST_INT)
	abort ();
      /* Recurse on the base term and let it do the reg_save.  */
      initial_return_save (XEXP (rtl, 0));
      return;

    default:
      abort ();
    }

  /* Express the memory offset relative to the CFA rather than SP.  */
  reg_save (NULL, DWARF_FRAME_RETURN_COLUMN, reg, offset - cfa.offset);
}
/* Given a SET, calculate the amount of stack adjustment it contains.
   Returns a positive value for stack growth (e.g. a push), a negative
   value for stack shrinkage, and 0 if PATTERN does not touch the stack
   pointer in a form we recognize.  */

static long
stack_adjust_offset (pattern)
     rtx pattern;
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  HOST_WIDE_INT offset = 0;
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int))) */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || GET_CODE (XEXP (src, 1)) != CONST_INT)
	return 0;

      offset = INTVAL (XEXP (src, 1));
      /* Adding to SP shrinks the stack (on STACK_GROWS_DOWNWARD targets),
	 so flip the sign for PLUS.  */
      if (code == PLUS)
	offset = -offset;
    }
  else if (GET_CODE (dest) == MEM)
    {
      /* (set (mem (pre_dec (reg sp))) (foo)) */
      src = XEXP (dest, 0);
      code = GET_CODE (src);

      switch (code)
	{
	case PRE_MODIFY:
	case POST_MODIFY:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      rtx val = XEXP (XEXP (src, 1), 1);
	      /* We handle only adjustments by constant amount.  */
	      if (GET_CODE (XEXP (src, 1)) != PLUS ||
		  GET_CODE (val) != CONST_INT)
		abort ();
	      offset = -INTVAL (val);
	      break;
	    }
	  return 0;

	case PRE_DEC:
	case POST_DEC:
	  /* A push: the stack grows by the size of the stored value.  */
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      offset = GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return 0;

	case PRE_INC:
	case POST_INC:
	  if (XEXP (src, 0) == stack_pointer_rtx)
	    {
	      offset = -GET_MODE_SIZE (GET_MODE (dest));
	      break;
	    }
	  return 0;

	default:
	  return 0;
	}
    }
  else
    return 0;

  return offset;
}
/* Check INSN to see if it looks like a push or a stack adjustment, and
   make a note of it if it does.  EH uses this information to find out how
   much extra space it needs to pop off the stack.  Updates the globals
   `cfa' and `args_size' and emits the corresponding CFIs.  */

static void
dwarf2out_stack_adjust (insn)
     rtx insn;
{
  HOST_WIDE_INT offset;
  const char *label;
  int i;

  if (!flag_asynchronous_unwind_tables && GET_CODE (insn) == CALL_INSN)
    {
      /* Extract the size of the args from the CALL rtx itself.
	 Drill down through PARALLEL/SET wrappers to the CALL.  */
      insn = PATTERN (insn);
      if (GET_CODE (insn) == PARALLEL)
	insn = XVECEXP (insn, 0, 0);
      if (GET_CODE (insn) == SET)
	insn = SET_SRC (insn);
      if (GET_CODE (insn) != CALL)
	abort ();

      dwarf2out_args_size ("", INTVAL (XEXP (insn, 1)));
      return;
    }

  /* If only calls can throw, and we have a frame pointer,
     save up adjustments until we see the CALL_INSN.  */
  else if (!flag_asynchronous_unwind_tables && cfa.reg != STACK_POINTER_REGNUM)
    return;

  if (GET_CODE (insn) == BARRIER)
    {
      /* When we see a BARRIER, we know to reset args_size to 0.  Usually
	 the compiler will have already emitted a stack adjustment, but
	 doesn't bother for calls to noreturn functions.  */
#ifdef STACK_GROWS_DOWNWARD
      offset = -args_size;
#else
      offset = args_size;
#endif
    }
  else if (GET_CODE (PATTERN (insn)) == SET)
    offset = stack_adjust_offset (PATTERN (insn));
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   || GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (offset = 0, i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
	  offset += stack_adjust_offset (XVECEXP (PATTERN (insn), 0, i));
    }
  else
    return;

  if (offset == 0)
    return;

  /* A pure SP adjustment moves the CFA by the same amount.  */
  if (cfa.reg == STACK_POINTER_REGNUM)
    cfa.offset += offset;

#ifndef STACK_GROWS_DOWNWARD
  offset = -offset;
#endif

  /* Track the running total of pushed argument bytes; clamp at zero
     since a BARRIER reset can otherwise drive it negative.  */
  args_size += offset;
  if (args_size < 0)
    args_size = 0;

  label = dwarf2out_cfi_label ();
  def_cfa_1 (label, &cfa);
  dwarf2out_args_size (label, args_size);
}
/* We delay emitting a register save until either (a) we reach the end
   of the prologue or (b) the register is clobbered.  This clusters
   register saves so that there are fewer pc advances.  */

struct queued_reg_save
{
  struct queued_reg_save *next;	/* Next pending save (LIFO list).  */
  rtx reg;			/* Register being saved.  */
  long cfa_offset;		/* Offset of the save slot from the CFA.  */
};

/* Head of the list of pending register saves.  */
static struct queued_reg_save *queued_reg_saves;

/* Label of the insn that most recently queued a save; used when the
   queue is flushed.  */
static const char *last_reg_save_label;
/* Queue a register save for later emission: remember that REG is saved
   at CFA offset OFFSET, tagging the queue with LABEL.  */

static void
queue_reg_save (label, reg, offset)
     const char *label;
     rtx reg;
     long offset;
{
  struct queued_reg_save *entry
    = (struct queued_reg_save *) xmalloc (sizeof (*entry));

  /* Push the new save onto the front of the pending list.  */
  entry->reg = reg;
  entry->cfa_offset = offset;
  entry->next = queued_reg_saves;
  queued_reg_saves = entry;

  last_reg_save_label = label;
}
/* Emit every pending register save as a real CFI, freeing the queue
   as we go, and reset the queue state.  */

static void
flush_queued_reg_saves ()
{
  struct queued_reg_save *entry = queued_reg_saves;

  while (entry)
    {
      struct queued_reg_save *tail;

      dwarf2out_reg_save (last_reg_save_label, REGNO (entry->reg),
			  entry->cfa_offset);
      tail = entry->next;
      free (entry);
      entry = tail;
    }

  queued_reg_saves = NULL;
  last_reg_save_label = NULL;
}
static bool
clobbers_queued_reg_save (insn)
rtx insn;
{
struct queued_reg_save *q;
for (q = queued_reg_saves; q; q = q->next)
if (modified_in_p (q->reg, insn))
return true;
return false;
}
/* A temporary register holding an integral value used in adjusting SP
   or setting up the store_reg.  The "offset" field holds the integer
   value, not an offset.  See the rules commentary below for how it is
   maintained.  */
static dw_cfa_location cfa_temp;
/* Record call frame debugging information for an expression EXPR,
which either sets SP or FP (adjusting how we calculate the frame
address) or saves a register to the stack. LABEL indicates the
address of EXPR.
This function encodes a state machine mapping rtxes to actions on
cfa, cfa_store, and cfa_temp.reg. We describe these rules so
users need not read the source code.
The High-Level Picture
Changes in the register we use to calculate the CFA: Currently we
assume that if you copy the CFA register into another register, we
should take the other one as the new CFA register; this seems to
work pretty well. If it's wrong for some target, it's simple
enough not to set RTX_FRAME_RELATED_P on the insn in question.
Changes in the register we use for saving registers to the stack:
This is usually SP, but not always. Again, we deduce that if you
copy SP into another register (and SP is not the CFA register),
then the new register is the one we will be using for register
saves. This also seems to work.
Register saves: There's not much guesswork about this one; if
RTX_FRAME_RELATED_P is set on an insn which modifies memory, it's a
register save, and the register used to calculate the destination
had better be the one we think we're using for this purpose.
Except: If the register being saved is the CFA register, and the
offset is nonzero, we are saving the CFA, so we assume we have to
use DW_CFA_def_cfa_expression. If the offset is 0, we assume that
the intent is to save the value of SP from the previous frame.
Invariants / Summaries of Rules
cfa current rule for calculating the CFA. It usually
consists of a register and an offset.
cfa_store register used by prologue code to save things to the stack
cfa_store.offset is the offset from the value of
cfa_store.reg to the actual CFA
cfa_temp register holding an integral value. cfa_temp.offset
stores the value, which will be used to adjust the
stack pointer. cfa_temp is also used like cfa_store,
to track stores to the stack via fp or a temp reg.
Rules 1- 4: Setting a register's value to cfa.reg or an expression
with cfa.reg as the first operand changes the cfa.reg and its
cfa.offset. Rule 1 and 4 also set cfa_temp.reg and
cfa_temp.offset.
Rules 6- 9: Set a non-cfa.reg register value to a constant or an
expression yielding a constant. This sets cfa_temp.reg
and cfa_temp.offset.
Rule 5: Create a new register cfa_store used to save items to the
stack.
Rules 10-14: Save a register to the stack. Define offset as the
difference of the original location and cfa_store's
location (or cfa_temp's location if cfa_temp is used).
The Rules
"{a,b}" indicates a choice of a xor b.
"<reg>:cfa.reg" indicates that <reg> must equal cfa.reg.
Rule 1:
(set <reg1> <reg2>:cfa.reg)
effects: cfa.reg = <reg1>
cfa.offset unchanged
cfa_temp.reg = <reg1>
cfa_temp.offset = cfa.offset
Rule 2:
(set sp ({minus,plus,losum} {sp,fp}:cfa.reg
{<const_int>,<reg>:cfa_temp.reg}))
effects: cfa.reg = sp if fp used
cfa.offset += {+/- <const_int>, cfa_temp.offset} if cfa.reg==sp
cfa_store.offset += {+/- <const_int>, cfa_temp.offset}
if cfa_store.reg==sp
Rule 3:
(set fp ({minus,plus,losum} <reg>:cfa.reg <const_int>))
effects: cfa.reg = fp
cfa_offset += +/- <const_int>
Rule 4:
(set <reg1> ({plus,losum} <reg2>:cfa.reg <const_int>))
constraints: <reg1> != fp
<reg1> != sp
effects: cfa.reg = <reg1>
cfa_temp.reg = <reg1>
cfa_temp.offset = cfa.offset
Rule 5:
(set <reg1> (plus <reg2>:cfa_temp.reg sp:cfa.reg))
constraints: <reg1> != fp
<reg1> != sp
effects: cfa_store.reg = <reg1>
cfa_store.offset = cfa.offset - cfa_temp.offset
Rule 6:
(set <reg> <const_int>)
effects: cfa_temp.reg = <reg>
cfa_temp.offset = <const_int>
Rule 7:
(set <reg1>:cfa_temp.reg (ior <reg2>:cfa_temp.reg <const_int>))
effects: cfa_temp.reg = <reg1>
cfa_temp.offset |= <const_int>
Rule 8:
(set <reg> (high <exp>))
effects: none
Rule 9:
(set <reg> (lo_sum <exp> <const_int>))
effects: cfa_temp.reg = <reg>
cfa_temp.offset = <const_int>
Rule 10:
(set (mem (pre_modify sp:cfa_store (???? <reg1> <const_int>))) <reg2>)
effects: cfa_store.offset -= <const_int>
cfa.offset = cfa_store.offset if cfa.reg == sp
cfa.reg = sp
cfa.base_offset = -cfa_store.offset
Rule 11:
(set (mem ({pre_inc,pre_dec} sp:cfa_store.reg)) <reg>)
effects: cfa_store.offset += -/+ mode_size(mem)
cfa.offset = cfa_store.offset if cfa.reg == sp
cfa.reg = sp
cfa.base_offset = -cfa_store.offset
Rule 12:
(set (mem ({minus,plus,losum} <reg1>:{cfa_store,cfa_temp} <const_int>))
<reg2>)
effects: cfa.reg = <reg1>
cfa.base_offset = -/+ <const_int> - {cfa_store,cfa_temp}.offset
Rule 13:
(set (mem <reg1>:{cfa_store,cfa_temp}) <reg2>)
effects: cfa.reg = <reg1>
cfa.base_offset = -{cfa_store,cfa_temp}.offset
Rule 14:
(set (mem (postinc <reg1>:cfa_temp <const_int>)) <reg2>)
effects: cfa.reg = <reg1>
cfa.base_offset = -cfa_temp.offset
cfa_temp.offset -= mode_size(mem) */
/* Implement the state machine documented in the large commentary
   above: interpret EXPR (a SET, PARALLEL or SEQUENCE) and update
   cfa, cfa_store and cfa_temp accordingly, emitting CFIs tagged
   with LABEL.  */
static void
dwarf2out_frame_debug_expr (expr, label)
     rtx expr;
     const char *label;
{
  rtx src, dest;
  HOST_WIDE_INT offset;

  /* If RTX_FRAME_RELATED_P is set on a PARALLEL, process each member of
     the PARALLEL independently.  The first element is always processed if
     it is a SET.  This is for backward compatibility.  Other elements
     are processed only if they are SETs and the RTX_FRAME_RELATED_P
     flag is set in them.  */
  if (GET_CODE (expr) == PARALLEL || GET_CODE (expr) == SEQUENCE)
    {
      int par_index;
      int limit = XVECLEN (expr, 0);

      for (par_index = 0; par_index < limit; par_index++)
	if (GET_CODE (XVECEXP (expr, 0, par_index)) == SET
	    && (RTX_FRAME_RELATED_P (XVECEXP (expr, 0, par_index))
		|| par_index == 0))
	  dwarf2out_frame_debug_expr (XVECEXP (expr, 0, par_index), label);

      return;
    }

  if (GET_CODE (expr) != SET)
    abort ();

  src = SET_SRC (expr);
  dest = SET_DEST (expr);

  switch (GET_CODE (dest))
    {
    case REG:
      /* Rule 1 */
      /* Update the CFA rule wrt SP or FP.  Make sure src is
	 relative to the current CFA register.  */
      switch (GET_CODE (src))
	{
	  /* Setting FP from SP.  */
	case REG:
	  if (cfa.reg == (unsigned) REGNO (src))
	    /* OK.  */
	    ;
	  else
	    abort ();

	  /* We used to require that dest be either SP or FP, but the
	     ARM copies SP to a temporary register, and from there to
	     FP.  So we just rely on the backends to only set
	     RTX_FRAME_RELATED_P on appropriate insns.  */
	  cfa.reg = REGNO (dest);
	  cfa_temp.reg = cfa.reg;
	  cfa_temp.offset = cfa.offset;
	  break;

	case PLUS:
	case MINUS:
	case LO_SUM:
	  if (dest == stack_pointer_rtx)
	    {
	      /* Rule 2 */
	      /* Adjusting SP.  The adjustment amount is either an
		 immediate or the value previously captured in cfa_temp.  */
	      switch (GET_CODE (XEXP (src, 1)))
		{
		case CONST_INT:
		  offset = INTVAL (XEXP (src, 1));
		  break;
		case REG:
		  if ((unsigned) REGNO (XEXP (src, 1)) != cfa_temp.reg)
		    abort ();
		  offset = cfa_temp.offset;
		  break;
		default:
		  abort ();
		}

	      if (XEXP (src, 0) == hard_frame_pointer_rtx)
		{
		  /* Restoring SP from FP in the epilogue.  */
		  if (cfa.reg != (unsigned) HARD_FRAME_POINTER_REGNUM)
		    abort ();
		  cfa.reg = STACK_POINTER_REGNUM;
		}
	      else if (GET_CODE (src) == LO_SUM)
		/* Assume we've set the source reg of the LO_SUM from sp.  */
		;
	      else if (XEXP (src, 0) != stack_pointer_rtx)
		abort ();

	      if (GET_CODE (src) != MINUS)
		offset = -offset;
	      if (cfa.reg == STACK_POINTER_REGNUM)
		cfa.offset += offset;
	      if (cfa_store.reg == STACK_POINTER_REGNUM)
		cfa_store.offset += offset;
	    }
	  else if (dest == hard_frame_pointer_rtx)
	    {
	      /* Rule 3 */
	      /* Either setting the FP from an offset of the SP,
		 or adjusting the FP */
	      if (! frame_pointer_needed)
		abort ();

	      if (GET_CODE (XEXP (src, 0)) == REG
		  && (unsigned) REGNO (XEXP (src, 0)) == cfa.reg
		  && GET_CODE (XEXP (src, 1)) == CONST_INT)
		{
		  offset = INTVAL (XEXP (src, 1));
		  if (GET_CODE (src) != MINUS)
		    offset = -offset;
		  cfa.offset += offset;
		  cfa.reg = HARD_FRAME_POINTER_REGNUM;
		}
	      else
		abort ();
	    }
	  else
	    {
	      if (GET_CODE (src) == MINUS)
		abort ();

	      /* Rule 4 */
	      if (GET_CODE (XEXP (src, 0)) == REG
		  && REGNO (XEXP (src, 0)) == cfa.reg
		  && GET_CODE (XEXP (src, 1)) == CONST_INT)
		{
		  /* Setting a temporary CFA register that will be copied
		     into the FP later on.  */
		  offset = - INTVAL (XEXP (src, 1));
		  cfa.offset += offset;
		  cfa.reg = REGNO (dest);
		  /* Or used to save regs to the stack.  */
		  cfa_temp.reg = cfa.reg;
		  cfa_temp.offset = cfa.offset;
		}

	      /* Rule 5 */
	      else if (GET_CODE (XEXP (src, 0)) == REG
		       && REGNO (XEXP (src, 0)) == cfa_temp.reg
		       && XEXP (src, 1) == stack_pointer_rtx)
		{
		  /* Setting a scratch register that we will use instead
		     of SP for saving registers to the stack.  */
		  if (cfa.reg != STACK_POINTER_REGNUM)
		    abort ();
		  cfa_store.reg = REGNO (dest);
		  cfa_store.offset = cfa.offset - cfa_temp.offset;
		}

	      /* Rule 9 */
	      else if (GET_CODE (src) == LO_SUM
		       && GET_CODE (XEXP (src, 1)) == CONST_INT)
		{
		  cfa_temp.reg = REGNO (dest);
		  cfa_temp.offset = INTVAL (XEXP (src, 1));
		}
	      else
		abort ();
	    }
	  break;

	  /* Rule 6 */
	case CONST_INT:
	  cfa_temp.reg = REGNO (dest);
	  cfa_temp.offset = INTVAL (src);
	  break;

	  /* Rule 7 */
	case IOR:
	  if (GET_CODE (XEXP (src, 0)) != REG
	      || (unsigned) REGNO (XEXP (src, 0)) != cfa_temp.reg
	      || GET_CODE (XEXP (src, 1)) != CONST_INT)
	    abort ();

	  if ((unsigned) REGNO (dest) != cfa_temp.reg)
	    cfa_temp.reg = REGNO (dest);
	  cfa_temp.offset |= INTVAL (XEXP (src, 1));
	  break;

	  /* Skip over HIGH, assuming it will be followed by a LO_SUM,
	     which will fill in all of the bits.  */
	  /* Rule 8 */
	case HIGH:
	  break;

	default:
	  abort ();
	}

      def_cfa_1 (label, &cfa);
      break;

    case MEM:
      if (GET_CODE (src) != REG)
	abort ();

      /* Saving a register to the stack.  Make sure dest is relative to the
	 CFA register.  Each case below computes OFFSET, the slot's offset
	 from the CFA.  */
      switch (GET_CODE (XEXP (dest, 0)))
	{
	  /* Rule 10 */
	  /* With a push.  */
	case PRE_MODIFY:
	  /* We can't handle variable size modifications.  */
	  if (GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1)) != CONST_INT)
	    abort ();
	  offset = -INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1));

	  if (REGNO (XEXP (XEXP (dest, 0), 0)) != STACK_POINTER_REGNUM
	      || cfa_store.reg != STACK_POINTER_REGNUM)
	    abort ();

	  cfa_store.offset += offset;
	  if (cfa.reg == STACK_POINTER_REGNUM)
	    cfa.offset = cfa_store.offset;

	  offset = -cfa_store.offset;
	  break;

	  /* Rule 11 */
	case PRE_INC:
	case PRE_DEC:
	  offset = GET_MODE_SIZE (GET_MODE (dest));
	  if (GET_CODE (XEXP (dest, 0)) == PRE_INC)
	    offset = -offset;

	  if (REGNO (XEXP (XEXP (dest, 0), 0)) != STACK_POINTER_REGNUM
	      || cfa_store.reg != STACK_POINTER_REGNUM)
	    abort ();

	  cfa_store.offset += offset;
	  if (cfa.reg == STACK_POINTER_REGNUM)
	    cfa.offset = cfa_store.offset;

	  offset = -cfa_store.offset;
	  break;

	  /* Rule 12 */
	  /* With an offset.  */
	case PLUS:
	case MINUS:
	case LO_SUM:
	  if (GET_CODE (XEXP (XEXP (dest, 0), 1)) != CONST_INT)
	    abort ();
	  offset = INTVAL (XEXP (XEXP (dest, 0), 1));
	  if (GET_CODE (XEXP (dest, 0)) == MINUS)
	    offset = -offset;

	  if (cfa_store.reg == (unsigned) REGNO (XEXP (XEXP (dest, 0), 0)))
	    offset -= cfa_store.offset;
	  else if (cfa_temp.reg == (unsigned) REGNO (XEXP (XEXP (dest, 0), 0)))
	    offset -= cfa_temp.offset;
	  else
	    abort ();
	  break;

	  /* Rule 13 */
	  /* Without an offset.  */
	case REG:
	  if (cfa_store.reg == (unsigned) REGNO (XEXP (dest, 0)))
	    offset = -cfa_store.offset;
	  else if (cfa_temp.reg == (unsigned) REGNO (XEXP (dest, 0)))
	    offset = -cfa_temp.offset;
	  else
	    abort ();
	  break;

	  /* Rule 14 */
	case POST_INC:
	  if (cfa_temp.reg != (unsigned) REGNO (XEXP (XEXP (dest, 0), 0)))
	    abort ();
	  offset = -cfa_temp.offset;
	  cfa_temp.offset -= GET_MODE_SIZE (GET_MODE (dest));
	  break;

	default:
	  abort ();
	}

      if (REGNO (src) != STACK_POINTER_REGNUM
	  && REGNO (src) != HARD_FRAME_POINTER_REGNUM
	  && (unsigned) REGNO (src) == cfa.reg)
	{
	  /* We're storing the current CFA reg into the stack.  */

	  if (cfa.offset == 0)
	    {
	      /* If the source register is exactly the CFA, assume
		 we're saving SP like any other register; this happens
		 on the ARM.  */
	      def_cfa_1 (label, &cfa);
	      queue_reg_save (label, stack_pointer_rtx, offset);
	      break;
	    }
	  else
	    {
	      /* Otherwise, we'll need to look in the stack to
		 calculate the CFA.  */
	      rtx x = XEXP (dest, 0);

	      if (GET_CODE (x) != REG)
		x = XEXP (x, 0);
	      if (GET_CODE (x) != REG)
		abort ();

	      cfa.reg = REGNO (x);
	      cfa.base_offset = offset;
	      cfa.indirect = 1;
	      def_cfa_1 (label, &cfa);
	      break;
	    }
	}

      /* An ordinary register save.  */
      def_cfa_1 (label, &cfa);
      queue_reg_save (label, src, offset);
      break;

    default:
      abort ();
    }
}
/* Record call frame debugging information for INSN, which either
   sets SP or FP (adjusting how we calculate the frame address) or saves a
   register to the stack.  If INSN is NULL_RTX, initialize our state
   for a new function.  */

void
dwarf2out_frame_debug (insn)
     rtx insn;
{
  const char *label;
  rtx src;

  if (insn == NULL_RTX)
    {
      /* Flush any queued register saves.  */
      flush_queued_reg_saves ();

      /* Set up state for generating call frame debug info.  The
	 function must start with the CFA at SP, per the CIE.  */
      lookup_cfa (&cfa);
      if (cfa.reg != (unsigned long) DWARF_FRAME_REGNUM (STACK_POINTER_REGNUM))
	abort ();
      cfa.reg = STACK_POINTER_REGNUM;
      cfa_store = cfa;
      cfa_temp.reg = -1;
      cfa_temp.offset = 0;
      return;
    }

  /* Non-INSNs, and insns that clobber a register with a pending save,
     force the queued saves out first.  */
  if (GET_CODE (insn) != INSN || clobbers_queued_reg_save (insn))
    flush_queued_reg_saves ();

  if (! RTX_FRAME_RELATED_P (insn))
    {
      if (!ACCUMULATE_OUTGOING_ARGS)
	dwarf2out_stack_adjust (insn);
      return;
    }

  label = dwarf2out_cfi_label ();

  /* A REG_FRAME_RELATED_EXPR note, if present, overrides the insn's
     own pattern as the expression to interpret.  */
  src = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (src)
    insn = XEXP (src, 0);
  else
    insn = PATTERN (insn);

  dwarf2out_frame_debug_expr (insn, label);
}
/* Output a Call Frame Information opcode and its operand(s).  CFI is the
   instruction to emit; FDE supplies the label state for advance_loc
   opcodes (may be NULL for CIE instructions); FOR_EH selects the
   .eh_frame encodings over the .debug_frame ones.  */

static void
output_cfi (cfi, fde, for_eh)
     dw_cfi_ref cfi;
     dw_fde_ref fde;
     int for_eh;
{
  /* DW_CFA_advance_loc, DW_CFA_offset and DW_CFA_restore pack their
     first operand into the low 6 bits of the opcode byte itself.  */
  if (cfi->dw_cfi_opc == DW_CFA_advance_loc)
    dw2_asm_output_data (1, (cfi->dw_cfi_opc
			     | (cfi->dw_cfi_oprnd1.dw_cfi_offset & 0x3f)),
			 "DW_CFA_advance_loc 0x%lx",
			 cfi->dw_cfi_oprnd1.dw_cfi_offset);
  else if (cfi->dw_cfi_opc == DW_CFA_offset)
    {
      dw2_asm_output_data (1, (cfi->dw_cfi_opc
			       | (cfi->dw_cfi_oprnd1.dw_cfi_reg_num & 0x3f)),
			   "DW_CFA_offset, column 0x%lx",
			   cfi->dw_cfi_oprnd1.dw_cfi_reg_num);
      dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd2.dw_cfi_offset, NULL);
    }
  else if (cfi->dw_cfi_opc == DW_CFA_restore)
    dw2_asm_output_data (1, (cfi->dw_cfi_opc
			     | (cfi->dw_cfi_oprnd1.dw_cfi_reg_num & 0x3f)),
			 "DW_CFA_restore, column 0x%lx",
			 cfi->dw_cfi_oprnd1.dw_cfi_reg_num);
  else
    {
      /* All remaining opcodes occupy a full byte, followed by operands
	 whose encoding depends on the opcode.  */
      dw2_asm_output_data (1, cfi->dw_cfi_opc,
			   "%s", dwarf_cfi_name (cfi->dw_cfi_opc));

      switch (cfi->dw_cfi_opc)
	{
	case DW_CFA_set_loc:
	  if (for_eh)
	    dw2_asm_output_encoded_addr_rtx (
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0),
		gen_rtx_SYMBOL_REF (Pmode, cfi->dw_cfi_oprnd1.dw_cfi_addr),
		NULL);
	  else
	    dw2_asm_output_addr (DWARF2_ADDR_SIZE,
				 cfi->dw_cfi_oprnd1.dw_cfi_addr, NULL);
	  break;

	/* The advance_loc variants emit a delta from the FDE's current
	   label and then advance that label.  */
	case DW_CFA_advance_loc1:
	  dw2_asm_output_delta (1, cfi->dw_cfi_oprnd1.dw_cfi_addr,
				fde->dw_fde_current_label, NULL);
	  fde->dw_fde_current_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_advance_loc2:
	  dw2_asm_output_delta (2, cfi->dw_cfi_oprnd1.dw_cfi_addr,
				fde->dw_fde_current_label, NULL);
	  fde->dw_fde_current_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_advance_loc4:
	  dw2_asm_output_delta (4, cfi->dw_cfi_oprnd1.dw_cfi_addr,
				fde->dw_fde_current_label, NULL);
	  fde->dw_fde_current_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_MIPS_advance_loc8:
	  dw2_asm_output_delta (8, cfi->dw_cfi_oprnd1.dw_cfi_addr,
				fde->dw_fde_current_label, NULL);
	  fde->dw_fde_current_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
	  break;

	case DW_CFA_offset_extended:
	case DW_CFA_def_cfa:
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd1.dw_cfi_reg_num,
				       NULL);
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd2.dw_cfi_offset, NULL);
	  break;

	case DW_CFA_offset_extended_sf:
	case DW_CFA_def_cfa_sf:
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd1.dw_cfi_reg_num,
				       NULL);
	  dw2_asm_output_data_sleb128 (cfi->dw_cfi_oprnd2.dw_cfi_offset, NULL);
	  break;

	case DW_CFA_restore_extended:
	case DW_CFA_undefined:
	case DW_CFA_same_value:
	case DW_CFA_def_cfa_register:
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd1.dw_cfi_reg_num,
				       NULL);
	  break;

	case DW_CFA_register:
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd1.dw_cfi_reg_num,
				       NULL);
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd2.dw_cfi_reg_num,
				       NULL);
	  break;

	case DW_CFA_def_cfa_offset:
	case DW_CFA_GNU_args_size:
	  dw2_asm_output_data_uleb128 (cfi->dw_cfi_oprnd1.dw_cfi_offset, NULL);
	  break;

	case DW_CFA_def_cfa_offset_sf:
	  dw2_asm_output_data_sleb128 (cfi->dw_cfi_oprnd1.dw_cfi_offset, NULL);
	  break;

	case DW_CFA_GNU_window_save:
	  break;

	case DW_CFA_def_cfa_expression:
	case DW_CFA_expression:
	  output_cfa_loc (cfi);
	  break;

	case DW_CFA_GNU_negative_offset_extended:
	  /* Obsoleted by DW_CFA_offset_extended_sf.  */
	  abort ();

	default:
	  break;
	}
    }
}
/* Output the call frame information used to record information
   that relates to calculating the frame pointer, and the locations
   of saved registers.  */
/* Emit the frame section (.eh_frame when FOR_EH, else the debug frame
   section): one CIE followed by an FDE for each function in fde_table.  */
static void
output_call_frame_info (for_eh)
     int for_eh;
{
  unsigned int i;
  dw_fde_ref fde;
  dw_cfi_ref cfi;
  char l1[20], l2[20], section_start_label[20];
  bool any_lsda_needed = false;
  char augmentation[6];
  int augmentation_size;
  int fde_encoding = DW_EH_PE_absptr;
  int per_encoding = DW_EH_PE_absptr;
  int lsda_encoding = DW_EH_PE_absptr;

  /* Don't emit a CIE if there won't be any FDEs.  */
  if (fde_table_in_use == 0)
    return;

  /* If we don't have any functions we'll want to unwind out of, don't
     emit any EH unwind information.  Note that if exceptions aren't
     enabled, we won't have collected nothrow information, and if we
     asked for asynchronous tables, we always want this info.  */
  if (for_eh)
    {
      bool any_eh_needed = !flag_exceptions || flag_asynchronous_unwind_tables;

      for (i = 0; i < fde_table_in_use; i++)
	if (fde_table[i].uses_eh_lsda)
	  any_eh_needed = any_lsda_needed = true;
	else if (! fde_table[i].nothrow)
	  any_eh_needed = true;

      if (! any_eh_needed)
	return;
    }

  /* We're going to be generating comments, so turn on app.  */
  if (flag_debug_asm)
    app_enable ();

  /* Switch to the appropriate output section.  */
  if (for_eh)
    (*targetm.asm_out.eh_frame_section) ();
  else
    named_section_flags (DEBUG_FRAME_SECTION, SECTION_DEBUG);

  ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
  ASM_OUTPUT_LABEL (asm_out_file, section_start_label);

  /* Output the CIE.  */
  ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
  ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
  dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
			"Length of Common Information Entry");
  ASM_OUTPUT_LABEL (asm_out_file, l1);

  /* Now that the CIE pointer is PC-relative for EH,
     use 0 to identify the CIE.  */
  dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
		       (for_eh ? 0 : DW_CIE_ID),
		       "CIE Identifier Tag");

  dw2_asm_output_data (1, DW_CIE_VERSION, "CIE Version");

  augmentation[0] = 0;
  augmentation_size = 0;
  if (for_eh)
    {
      char *p;

      /* Augmentation:
	 z	Indicates that a uleb128 is present to size the
		augmentation section.
	 L	Indicates the encoding (and thus presence) of
		an LSDA pointer in the FDE augmentation.
	 R	Indicates a non-default pointer encoding for
		FDE code pointers.
	 P	Indicates the presence of an encoding + language
		personality routine in the CIE augmentation.  */

      fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
      per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
      lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);

      p = augmentation + 1;
      if (eh_personality_libfunc)
	{
	  *p++ = 'P';
	  augmentation_size += 1 + size_of_encoded_value (per_encoding);
	}
      if (any_lsda_needed)
	{
	  *p++ = 'L';
	  augmentation_size += 1;
	}
      if (fde_encoding != DW_EH_PE_absptr)
	{
	  *p++ = 'R';
	  augmentation_size += 1;
	}
      if (p > augmentation + 1)
	{
	  /* At least one letter was added, so prepend the 'z' marker
	     and terminate the string.  */
	  augmentation[0] = 'z';
	  *p = '\0';
	}

      /* Ug.  Some platforms can't do unaligned dynamic relocations at all.  */
      if (eh_personality_libfunc && per_encoding == DW_EH_PE_aligned)
	{
	  int offset = (  4		/* Length */
			+ 4		/* CIE Id */
			+ 1		/* CIE version */
			+ strlen (augmentation) + 1	/* Augmentation */
			+ size_of_uleb128 (1)		/* Code alignment */
			+ size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
			+ 1		/* RA column */
			+ 1		/* Augmentation size */
			+ 1		/* Personality encoding */ );
	  int pad = -offset & (PTR_SIZE - 1);

	  augmentation_size += pad;

	  /* Augmentations should be small, so there's scarce need to
	     iterate for a solution.  Die if we exceed one uleb128 byte.  */
	  if (size_of_uleb128 (augmentation_size) != 1)
	    abort ();
	}
    }

  dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
  dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
  dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
			       "CIE Data Alignment Factor");
  dw2_asm_output_data (1, DWARF_FRAME_RETURN_COLUMN, "CIE RA Column");

  if (augmentation[0])
    {
      dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
      if (eh_personality_libfunc)
	{
	  dw2_asm_output_data (1, per_encoding, "Personality (%s)",
			       eh_data_format_name (per_encoding));
	  dw2_asm_output_encoded_addr_rtx (per_encoding,
					   eh_personality_libfunc, NULL);
	}

      if (any_lsda_needed)
	dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
			     eh_data_format_name (lsda_encoding));

      if (fde_encoding != DW_EH_PE_absptr)
	dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
			     eh_data_format_name (fde_encoding));
    }

  /* Emit the CIE's initial call-frame instructions.  */
  for (cfi = cie_cfi_head; cfi != NULL; cfi = cfi->dw_cfi_next)
    output_cfi (cfi, NULL, for_eh);

  /* Pad the CIE out to an address sized boundary.  */
  ASM_OUTPUT_ALIGN (asm_out_file,
		    floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
  ASM_OUTPUT_LABEL (asm_out_file, l2);

  /* Loop through all of the FDE's.  */
  for (i = 0; i < fde_table_in_use; i++)
    {
      fde = &fde_table[i];

      /* Don't emit EH unwind info for leaf functions that don't need it.  */
      if (for_eh && !flag_asynchronous_unwind_tables && flag_exceptions
	  && (fde->nothrow || fde->all_throwers_are_sibcalls)
	  && !fde->uses_eh_lsda)
	continue;

      ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, FDE_LABEL, for_eh + i * 2);
      ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + i * 2);
      ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + i * 2);
      dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
			    "FDE Length");
      ASM_OUTPUT_LABEL (asm_out_file, l1);

      /* For EH the CIE pointer is a self-relative offset; for debug it is
	 a section offset.  */
      if (for_eh)
	dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
      else
	dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
			       "FDE CIE offset");

      if (for_eh)
	{
	  dw2_asm_output_encoded_addr_rtx (fde_encoding,
		   gen_rtx_SYMBOL_REF (Pmode, fde->dw_fde_begin),
		   "FDE initial location");
	  dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
				fde->dw_fde_end, fde->dw_fde_begin,
				"FDE address range");
	}
      else
	{
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
			       "FDE initial location");
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE,
				fde->dw_fde_end, fde->dw_fde_begin,
				"FDE address range");
	}

      if (augmentation[0])
	{
	  if (any_lsda_needed)
	    {
	      int size = size_of_encoded_value (lsda_encoding);

	      if (lsda_encoding == DW_EH_PE_aligned)
		{
		  int offset = (  4		/* Length */
				+ 4		/* CIE offset */
				+ 2 * size_of_encoded_value (fde_encoding)
				+ 1		/* Augmentation size */ );
		  int pad = -offset & (PTR_SIZE - 1);

		  size += pad;
		  if (size_of_uleb128 (size) != 1)
		    abort ();
		}

	      dw2_asm_output_data_uleb128 (size, "Augmentation size");

	      if (fde->uses_eh_lsda)
		{
		  ASM_GENERATE_INTERNAL_LABEL (l1, "LLSDA",
					       fde->funcdef_number);
		  dw2_asm_output_encoded_addr_rtx (
			lsda_encoding, gen_rtx_SYMBOL_REF (Pmode, l1),
			"Language Specific Data Area");
		}
	      else
		{
		  if (lsda_encoding == DW_EH_PE_aligned)
		    ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
		  dw2_asm_output_data
		    (size_of_encoded_value (lsda_encoding), 0,
		     "Language Specific Data Area (none)");
		}
	    }
	  else
	    dw2_asm_output_data_uleb128 (0, "Augmentation size");
	}

      /* Loop through the Call Frame Instructions associated with
	 this FDE.  */
      fde->dw_fde_current_label = fde->dw_fde_begin;
      for (cfi = fde->dw_fde_cfi; cfi != NULL; cfi = cfi->dw_cfi_next)
	output_cfi (cfi, fde, for_eh);

      /* Pad the FDE out to an address sized boundary.  */
      ASM_OUTPUT_ALIGN (asm_out_file,
			floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
      ASM_OUTPUT_LABEL (asm_out_file, l2);
    }

  if (for_eh && targetm.terminate_dw2_eh_frame_info)
    dw2_asm_output_data (4, 0, "End of Table");
#ifdef MIPS_DEBUGGING_INFO
  /* Work around Irix 6 assembler bug whereby labels at the end of a section
     get a value of 0.  Putting .align 0 after the label fixes it.  */
  ASM_OUTPUT_ALIGN (asm_out_file, 0);
#endif

  /* Turn off app to make assembly quicker.  */
  if (flag_debug_asm)
    app_disable ();
}
/* Output a marker (i.e. a label) for the beginning of a function, before
the prologue. */
/* Emit the begin-of-function label, allocate and initialize the FDE that
   will describe this function's frame, and (for genuine dwarf2 debug
   output) emit the source line note for LINE/FILE.  Called before the
   prologue is generated.  */

void
dwarf2out_begin_prologue (line, file)
     unsigned int line ATTRIBUTE_UNUSED;
     const char *file ATTRIBUTE_UNUSED;
{
  char label[MAX_ARTIFICIAL_LABEL_BYTES];
  dw_fde_ref fde;

  current_function_func_begin_label = 0;

#ifdef IA64_UNWIND_INFO
  /* ??? current_function_func_begin_label is also used by except.c
     for call-site information.  We must emit this label if it might
     be used.  */
  if ((! flag_exceptions || USING_SJLJ_EXCEPTIONS)
      && ! dwarf2out_do_frame ())
    return;
#else
  if (! dwarf2out_do_frame ())
    return;
#endif

  function_section (current_function_decl);
  /* Generate the label name into LABEL, then emit it into the
     assembly output.  */
  ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
			  current_function_funcdef_no);
  current_function_func_begin_label = get_identifier (label);

#ifdef IA64_UNWIND_INFO
  /* We can elide the fde allocation if we're not emitting debug info.  */
  if (! dwarf2out_do_frame ())
    return;
#endif

  /* Expand the fde table if necessary.  */
  if (fde_table_in_use == fde_table_allocated)
    {
      fde_table_allocated += FDE_TABLE_INCREMENT;
      fde_table
	= (dw_fde_ref) xrealloc (fde_table,
				 fde_table_allocated * sizeof (dw_fde_node));
    }

  /* Record the FDE associated with this function.  */
  current_funcdef_fde = fde_table_in_use;

  /* Add the new FDE at the end of the fde_table.  The end label and CFI
     list are filled in later (dwarf2out_end_epilogue / cfi emission).  */
  fde = &fde_table[fde_table_in_use++];
  fde->dw_fde_begin = xstrdup (label);
  fde->dw_fde_current_label = NULL;
  fde->dw_fde_end = NULL;
  fde->dw_fde_cfi = NULL;
  fde->funcdef_number = current_function_funcdef_no;
  fde->nothrow = current_function_nothrow;
  fde->uses_eh_lsda = cfun->uses_eh_lsda;
  fde->all_throwers_are_sibcalls = cfun->all_throwers_are_sibcalls;

  args_size = old_args_size = 0;

  /* We only want to output line number information for the genuine dwarf2
     prologue case, not the eh frame case.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (file)
    dwarf2out_source_line (line, file);
#endif
}
/* Output a marker (i.e. a label) for the absolute end of the generated code
for a function definition. This gets called *after* the epilogue code has
been generated. */
void
dwarf2out_end_epilogue (line, file)
     unsigned int line ATTRIBUTE_UNUSED;
     const char *file ATTRIBUTE_UNUSED;
{
  char end_label[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Emit a label marking the endpoint of the code generated for this
     function.  */
  ASM_GENERATE_INTERNAL_LABEL (end_label, FUNC_END_LABEL,
			       current_function_funcdef_no);
  ASM_OUTPUT_LABEL (asm_out_file, end_label);

  /* The FDE for this function is the one most recently added to
     fde_table; record the label as its end address.  */
  fde_table[fde_table_in_use - 1].dw_fde_end = xstrdup (end_label);
}
/* One-time initialization of the frame-description machinery: allocate
   the initial fde_table and record the CFA rule in effect on function
   entry.  */

void
dwarf2out_frame_init ()
{
  /* Allocate the initial hunk of the fde_table.  */
  fde_table = (dw_fde_ref) xcalloc (FDE_TABLE_INCREMENT, sizeof (dw_fde_node));
  fde_table_allocated = FDE_TABLE_INCREMENT;
  fde_table_in_use = 0;

  /* Generate the CFA instructions common to all FDE's.  Do it now for the
     sake of lookup_cfa.  */
#ifdef DWARF2_UNWIND_INFO
  /* On entry, the Canonical Frame Address is at SP.  */
  dwarf2out_def_cfa (NULL, STACK_POINTER_REGNUM, INCOMING_FRAME_SP_OFFSET);
  initial_return_save (INCOMING_RETURN_ADDR_RTX);
#endif
}
/* Output call frame information for the whole translation unit.  */

void
dwarf2out_frame_finish ()
{
  /* Emit the debug-info variant (.debug_frame) when dwarf2 debugging
     was requested.  */
  int debug_frame_p = (write_symbols == DWARF2_DEBUG
		       || write_symbols == VMS_AND_DWARF2_DEBUG);

  if (debug_frame_p)
    output_call_frame_info (0);

  /* Emit the exception-handling variant (.eh_frame) when table-driven
     EH or unwind tables are in use.  */
  if (! USING_SJLJ_EXCEPTIONS && (flag_unwind_tables || flag_exceptions))
    output_call_frame_info (1);
}
/* And now, the subset of the debugging information support code necessary
for emitting location expressions. */
/* We need some way to distinguish DW_OP_addr with a direct symbol
   relocation from DW_OP_addr with a dtp-relative symbol relocation.
   This internal pseudo-opcode lives above the one-byte DWARF opcode
   space so it cannot collide with a real DW_OP_* value.  */
#define INTERNAL_DW_OP_tls_addr		(0x100 + DW_OP_addr)

/* Shorthand pointer typedefs for the DIE and location-expression
   structures defined below.  */
typedef struct dw_val_struct *dw_val_ref;
typedef struct die_struct *dw_die_ref;
typedef struct dw_loc_descr_struct *dw_loc_descr_ref;
typedef struct dw_loc_list_struct *dw_loc_list_ref;
/* Each DIE may have a series of attribute/value pairs.  Values
   can take on several forms.  The forms that are used in this
   implementation are listed below.  The comments name the member of
   the dw_val_struct union (below) that carries each form's payload.  */

typedef enum
{
  dw_val_class_addr,		/* v.val_addr (an rtx).  */
  dw_val_class_offset,		/* v.val_offset.  */
  dw_val_class_loc,		/* v.val_loc (location expression).  */
  dw_val_class_loc_list,	/* v.val_loc_list.  */
  dw_val_class_range_list,	/* range list (see uses for payload).  */
  dw_val_class_const,		/* v.val_int.  */
  dw_val_class_unsigned_const,	/* v.val_unsigned.  */
  dw_val_class_long_long,	/* v.val_long_long (CONST_DOUBLE).  */
  dw_val_class_float,		/* v.val_float.  */
  dw_val_class_flag,		/* v.val_flag.  */
  dw_val_class_die_ref,		/* v.val_die_ref (link to another DIE).  */
  dw_val_class_fde_ref,		/* v.val_fde_index.  */
  dw_val_class_lbl_id,		/* v.val_lbl_id.  */
  dw_val_class_lbl_offset,	/* label whose section offset is used.  */
  dw_val_class_str		/* v.val_str (indirect string).  */
}
dw_val_class;
/* Describe a double word constant value.  */
/* ??? Every instance of long_long in the code really means CONST_DOUBLE.  */

typedef struct dw_long_long_struct
{
  unsigned long hi;		/* High word of the constant.  */
  unsigned long low;		/* Low word of the constant.  */
}
dw_long_long_const;
/* Describe a floating point constant value, stored as a sequence of
   host longs.  */

typedef struct dw_fp_struct
{
  long *array;			/* The constant's representation.  */
  unsigned length;		/* Number of elements in ARRAY.  */
}
dw_float_const;
/* The dw_val_node describes an attribute's value, as it is
   represented internally.  VAL_CLASS (a dw_val_class, above) records
   which member of the union V is live.  */

typedef struct dw_val_struct
{
  dw_val_class val_class;	/* Discriminator for the union below.  */
  union
    {
      rtx val_addr;			/* dw_val_class_addr.  */
      long unsigned val_offset;		/* dw_val_class_offset.  */
      dw_loc_list_ref val_loc_list;	/* dw_val_class_loc_list.  */
      dw_loc_descr_ref val_loc;		/* dw_val_class_loc.  */
      long int val_int;			/* dw_val_class_const.  */
      long unsigned val_unsigned;	/* dw_val_class_unsigned_const.  */
      dw_long_long_const val_long_long;	/* dw_val_class_long_long.  */
      dw_float_const val_float;		/* dw_val_class_float.  */
      struct
	{
	  dw_die_ref die;		/* The referenced DIE.  */
	  int external;
	} val_die_ref;			/* dw_val_class_die_ref.  */
      unsigned val_fde_index;		/* dw_val_class_fde_ref.  */
      struct indirect_string_node *val_str;	/* dw_val_class_str.  */
      char *val_lbl_id;			/* dw_val_class_lbl_id.  */
      unsigned char val_flag;		/* dw_val_class_flag.  */
    }
  v;
}
dw_val_node;
/* Locations in memory are described using a sequence of stack machine
   operations.  */

typedef struct dw_loc_descr_struct
{
  dw_loc_descr_ref dw_loc_next;		/* Next operation in the expression.  */
  enum dwarf_location_atom dw_loc_opc;	/* The DW_OP_* opcode.  */
  dw_val_node dw_loc_oprnd1;		/* First operand, if any.  */
  dw_val_node dw_loc_oprnd2;		/* Second operand, if any.  */
  int dw_loc_addr;	/* Byte offset of this op within its expression;
			   filled in by size_of_locs.  */
}
dw_loc_descr_node;
/* Location lists are ranges + location descriptions for that range,
   so you can track variables that are in different places over
   their entire life.  */

typedef struct dw_loc_list_struct
{
  dw_loc_list_ref dw_loc_next;	/* Next range in the list.  */
  const char *begin;		/* Label for begin address of range.  */
  const char *end;		/* Label for end address of range.  */
  char *ll_symbol;		/* Label for beginning of location list.
				   Only on head of list.  */
  const char *section;		/* Section this loclist is relative to.  */
  dw_loc_descr_ref expr;	/* Location description for this range.  */
} dw_loc_list_node;
/* Forward declarations for the location-expression support routines
   defined below.  */
static const char *dwarf_stack_op_name	PARAMS ((unsigned));
static dw_loc_descr_ref new_loc_descr	PARAMS ((enum dwarf_location_atom,
						 unsigned long,
						 unsigned long));
static void add_loc_descr		PARAMS ((dw_loc_descr_ref *,
						 dw_loc_descr_ref));
static unsigned long size_of_loc_descr	PARAMS ((dw_loc_descr_ref));
static unsigned long size_of_locs	PARAMS ((dw_loc_descr_ref));
static void output_loc_operands		PARAMS ((dw_loc_descr_ref));
static void output_loc_sequence		PARAMS ((dw_loc_descr_ref));
/* Convert a DWARF stack opcode into its string name. */
static const char *
dwarf_stack_op_name (op)
unsigned op;
{
switch (op)
{
case DW_OP_addr:
case INTERNAL_DW_OP_tls_addr:
return "DW_OP_addr";
case DW_OP_deref:
return "DW_OP_deref";
case DW_OP_const1u:
return "DW_OP_const1u";
case DW_OP_const1s:
return "DW_OP_const1s";
case DW_OP_const2u:
return "DW_OP_const2u";
case DW_OP_const2s:
return "DW_OP_const2s";
case DW_OP_const4u:
return "DW_OP_const4u";
case DW_OP_const4s:
return "DW_OP_const4s";
case DW_OP_const8u:
return "DW_OP_const8u";
case DW_OP_const8s:
return "DW_OP_const8s";
case DW_OP_constu:
return "DW_OP_constu";
case DW_OP_consts:
return "DW_OP_consts";
case DW_OP_dup:
return "DW_OP_dup";
case DW_OP_drop:
return "DW_OP_drop";
case DW_OP_over:
return "DW_OP_over";
case DW_OP_pick:
return "DW_OP_pick";
case DW_OP_swap:
return "DW_OP_swap";
case DW_OP_rot:
return "DW_OP_rot";
case DW_OP_xderef:
return "DW_OP_xderef";
case DW_OP_abs:
return "DW_OP_abs";
case DW_OP_and:
return "DW_OP_and";
case DW_OP_div:
return "DW_OP_div";
case DW_OP_minus:
return "DW_OP_minus";
case DW_OP_mod:
return "DW_OP_mod";
case DW_OP_mul:
return "DW_OP_mul";
case DW_OP_neg:
return "DW_OP_neg";
case DW_OP_not:
return "DW_OP_not";
case DW_OP_or:
return "DW_OP_or";
case DW_OP_plus:
return "DW_OP_plus";
case DW_OP_plus_uconst:
return "DW_OP_plus_uconst";
case DW_OP_shl:
return "DW_OP_shl";
case DW_OP_shr:
return "DW_OP_shr";
case DW_OP_shra:
return "DW_OP_shra";
case DW_OP_xor:
return "DW_OP_xor";
case DW_OP_bra:
return "DW_OP_bra";
case DW_OP_eq:
return "DW_OP_eq";
case DW_OP_ge:
return "DW_OP_ge";
case DW_OP_gt:
return "DW_OP_gt";
case DW_OP_le:
return "DW_OP_le";
case DW_OP_lt:
return "DW_OP_lt";
case DW_OP_ne:
return "DW_OP_ne";
case DW_OP_skip:
return "DW_OP_skip";
case DW_OP_lit0:
return "DW_OP_lit0";
case DW_OP_lit1:
return "DW_OP_lit1";
case DW_OP_lit2:
return "DW_OP_lit2";
case DW_OP_lit3:
return "DW_OP_lit3";
case DW_OP_lit4:
return "DW_OP_lit4";
case DW_OP_lit5:
return "DW_OP_lit5";
case DW_OP_lit6:
return "DW_OP_lit6";
case DW_OP_lit7:
return "DW_OP_lit7";
case DW_OP_lit8:
return "DW_OP_lit8";
case DW_OP_lit9:
return "DW_OP_lit9";
case DW_OP_lit10:
return "DW_OP_lit10";
case DW_OP_lit11:
return "DW_OP_lit11";
case DW_OP_lit12:
return "DW_OP_lit12";
case DW_OP_lit13:
return "DW_OP_lit13";
case DW_OP_lit14:
return "DW_OP_lit14";
case DW_OP_lit15:
return "DW_OP_lit15";
case DW_OP_lit16:
return "DW_OP_lit16";
case DW_OP_lit17:
return "DW_OP_lit17";
case DW_OP_lit18:
return "DW_OP_lit18";
case DW_OP_lit19:
return "DW_OP_lit19";
case DW_OP_lit20:
return "DW_OP_lit20";
case DW_OP_lit21:
return "DW_OP_lit21";
case DW_OP_lit22:
return "DW_OP_lit22";
case DW_OP_lit23:
return "DW_OP_lit23";
case DW_OP_lit24:
return "DW_OP_lit24";
case DW_OP_lit25:
return "DW_OP_lit25";
case DW_OP_lit26:
return "DW_OP_lit26";
case DW_OP_lit27:
return "DW_OP_lit27";
case DW_OP_lit28:
return "DW_OP_lit28";
case DW_OP_lit29:
return "DW_OP_lit29";
case DW_OP_lit30:
return "DW_OP_lit30";
case DW_OP_lit31:
return "DW_OP_lit31";
case DW_OP_reg0:
return "DW_OP_reg0";
case DW_OP_reg1:
return "DW_OP_reg1";
case DW_OP_reg2:
return "DW_OP_reg2";
case DW_OP_reg3:
return "DW_OP_reg3";
case DW_OP_reg4:
return "DW_OP_reg4";
case DW_OP_reg5:
return "DW_OP_reg5";
case DW_OP_reg6:
return "DW_OP_reg6";
case DW_OP_reg7:
return "DW_OP_reg7";
case DW_OP_reg8:
return "DW_OP_reg8";
case DW_OP_reg9:
return "DW_OP_reg9";
case DW_OP_reg10:
return "DW_OP_reg10";
case DW_OP_reg11:
return "DW_OP_reg11";
case DW_OP_reg12:
return "DW_OP_reg12";
case DW_OP_reg13:
return "DW_OP_reg13";
case DW_OP_reg14:
return "DW_OP_reg14";
case DW_OP_reg15:
return "DW_OP_reg15";
case DW_OP_reg16:
return "DW_OP_reg16";
case DW_OP_reg17:
return "DW_OP_reg17";
case DW_OP_reg18:
return "DW_OP_reg18";
case DW_OP_reg19:
return "DW_OP_reg19";
case DW_OP_reg20:
return "DW_OP_reg20";
case DW_OP_reg21:
return "DW_OP_reg21";
case DW_OP_reg22:
return "DW_OP_reg22";
case DW_OP_reg23:
return "DW_OP_reg23";
case DW_OP_reg24:
return "DW_OP_reg24";
case DW_OP_reg25:
return "DW_OP_reg25";
case DW_OP_reg26:
return "DW_OP_reg26";
case DW_OP_reg27:
return "DW_OP_reg27";
case DW_OP_reg28:
return "DW_OP_reg28";
case DW_OP_reg29:
return "DW_OP_reg29";
case DW_OP_reg30:
return "DW_OP_reg30";
case DW_OP_reg31:
return "DW_OP_reg31";
case DW_OP_breg0:
return "DW_OP_breg0";
case DW_OP_breg1:
return "DW_OP_breg1";
case DW_OP_breg2:
return "DW_OP_breg2";
case DW_OP_breg3:
return "DW_OP_breg3";
case DW_OP_breg4:
return "DW_OP_breg4";
case DW_OP_breg5:
return "DW_OP_breg5";
case DW_OP_breg6:
return "DW_OP_breg6";
case DW_OP_breg7:
return "DW_OP_breg7";
case DW_OP_breg8:
return "DW_OP_breg8";
case DW_OP_breg9:
return "DW_OP_breg9";
case DW_OP_breg10:
return "DW_OP_breg10";
case DW_OP_breg11:
return "DW_OP_breg11";
case DW_OP_breg12:
return "DW_OP_breg12";
case DW_OP_breg13:
return "DW_OP_breg13";
case DW_OP_breg14:
return "DW_OP_breg14";
case DW_OP_breg15:
return "DW_OP_breg15";
case DW_OP_breg16:
return "DW_OP_breg16";
case DW_OP_breg17:
return "DW_OP_breg17";
case DW_OP_breg18:
return "DW_OP_breg18";
case DW_OP_breg19:
return "DW_OP_breg19";
case DW_OP_breg20:
return "DW_OP_breg20";
case DW_OP_breg21:
return "DW_OP_breg21";
case DW_OP_breg22:
return "DW_OP_breg22";
case DW_OP_breg23:
return "DW_OP_breg23";
case DW_OP_breg24:
return "DW_OP_breg24";
case DW_OP_breg25:
return "DW_OP_breg25";
case DW_OP_breg26:
return "DW_OP_breg26";
case DW_OP_breg27:
return "DW_OP_breg27";
case DW_OP_breg28:
return "DW_OP_breg28";
case DW_OP_breg29:
return "DW_OP_breg29";
case DW_OP_breg30:
return "DW_OP_breg30";
case DW_OP_breg31:
return "DW_OP_breg31";
case DW_OP_regx:
return "DW_OP_regx";
case DW_OP_fbreg:
return "DW_OP_fbreg";
case DW_OP_bregx:
return "DW_OP_bregx";
case DW_OP_piece:
return "DW_OP_piece";
case DW_OP_deref_size:
return "DW_OP_deref_size";
case DW_OP_xderef_size:
return "DW_OP_xderef_size";
case DW_OP_nop:
return "DW_OP_nop";
case DW_OP_push_object_address:
return "DW_OP_push_object_address";
case DW_OP_call2:
return "DW_OP_call2";
case DW_OP_call4:
return "DW_OP_call4";
case DW_OP_call_ref:
return "DW_OP_call_ref";
case DW_OP_GNU_push_tls_address:
return "DW_OP_GNU_push_tls_address";
default:
return "OP_<unknown>";
}
}
/* Return a pointer to a newly allocated location description.  Location
   descriptions are simple expression terms that can be strung
   together to form more complicated location (address) descriptions.
   Both operands start out classed as unsigned constants; callers
   adjust the class when an operand means something else.  */

static inline dw_loc_descr_ref
new_loc_descr (op, oprnd1, oprnd2)
     enum dwarf_location_atom op;
     unsigned long oprnd1;
     unsigned long oprnd2;
{
  /* Use xcalloc so the whole operand union (including the long_long
     member) starts out zeroed.  */
  dw_loc_descr_ref node
    = (dw_loc_descr_ref) xcalloc (1, sizeof (dw_loc_descr_node));

  node->dw_loc_opc = op;
  node->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
  node->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
  node->dw_loc_oprnd1.v.val_unsigned = oprnd1;
  node->dw_loc_oprnd2.v.val_unsigned = oprnd2;

  return node;
}
/* Append DESCR at the tail of the location description expression
   headed by *LIST_HEAD.  */

static inline void
add_loc_descr (list_head, descr)
     dw_loc_descr_ref *list_head;
     dw_loc_descr_ref descr;
{
  dw_loc_descr_ref *tail = list_head;

  /* Walk to the terminating NULL link, then splice DESCR in.  */
  while (*tail != NULL)
    tail = &(*tail)->dw_loc_next;

  *tail = descr;
}
/* Return the size of a location descriptor. */
static unsigned long
size_of_loc_descr (loc)
dw_loc_descr_ref loc;
{
unsigned long size = 1;
switch (loc->dw_loc_opc)
{
case DW_OP_addr:
case INTERNAL_DW_OP_tls_addr:
size += DWARF2_ADDR_SIZE;
break;
case DW_OP_const1u:
case DW_OP_const1s:
size += 1;
break;
case DW_OP_const2u:
case DW_OP_const2s:
size += 2;
break;
case DW_OP_const4u:
case DW_OP_const4s:
size += 4;
break;
case DW_OP_const8u:
case DW_OP_const8s:
size += 8;
break;
case DW_OP_constu:
size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
break;
case DW_OP_consts:
size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
break;
case DW_OP_pick:
size += 1;
break;
case DW_OP_plus_uconst:
size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
break;
case DW_OP_skip:
case DW_OP_bra:
size += 2;
break;
case DW_OP_breg0:
case DW_OP_breg1:
case DW_OP_breg2:
case DW_OP_breg3:
case DW_OP_breg4:
case DW_OP_breg5:
case DW_OP_breg6:
case DW_OP_breg7:
case DW_OP_breg8:
case DW_OP_breg9:
case DW_OP_breg10:
case DW_OP_breg11:
case DW_OP_breg12:
case DW_OP_breg13:
case DW_OP_breg14:
case DW_OP_breg15:
case DW_OP_breg16:
case DW_OP_breg17:
case DW_OP_breg18:
case DW_OP_breg19:
case DW_OP_breg20:
case DW_OP_breg21:
case DW_OP_breg22:
case DW_OP_breg23:
case DW_OP_breg24:
case DW_OP_breg25:
case DW_OP_breg26:
case DW_OP_breg27:
case DW_OP_breg28:
case DW_OP_breg29:
case DW_OP_breg30:
case DW_OP_breg31:
size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
break;
case DW_OP_regx:
size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
break;
case DW_OP_fbreg:
size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
break;
case DW_OP_bregx:
size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
break;
case DW_OP_piece:
size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
break;
case DW_OP_deref_size:
case DW_OP_xderef_size:
size += 1;
break;
case DW_OP_call2:
size += 2;
break;
case DW_OP_call4:
size += 4;
break;
case DW_OP_call_ref:
size += DWARF2_ADDR_SIZE;
break;
default:
break;
}
return size;
}
/* Return the total size of a series of location descriptors.  As a
   side effect, record each descriptor's byte offset within the series
   in its dw_loc_addr field (used later for DW_OP_skip/DW_OP_bra
   branch offsets).  */

static unsigned long
size_of_locs (loc)
     dw_loc_descr_ref loc;
{
  unsigned long total = 0;

  while (loc != NULL)
    {
      loc->dw_loc_addr = total;
      total += size_of_loc_descr (loc);
      loc = loc->dw_loc_next;
    }

  return total;
}
/* Output location description stack opcode's operands (if any).
   VAL1/VAL2 alias the descriptor's two operand slots; which union
   member of each is live depends on the opcode.  */

static void
output_loc_operands (loc)
     dw_loc_descr_ref loc;
{
  dw_val_ref val1 = &loc->dw_loc_oprnd1;
  dw_val_ref val2 = &loc->dw_loc_oprnd2;

  switch (loc->dw_loc_opc)
    {
#ifdef DWARF2_DEBUGGING_INFO
    case DW_OP_addr:
      dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
      break;
    case DW_OP_const2u:
    case DW_OP_const2s:
      dw2_asm_output_data (2, val1->v.val_int, NULL);
      break;
    case DW_OP_const4u:
    case DW_OP_const4s:
      dw2_asm_output_data (4, val1->v.val_int, NULL);
      break;
    case DW_OP_const8u:
    case DW_OP_const8s:
      /* An 8-byte constant only fits in val_int on a 64-bit host.  */
      if (HOST_BITS_PER_LONG < 64)
	abort ();
      dw2_asm_output_data (8, val1->v.val_int, NULL);
      break;
    case DW_OP_skip:
    case DW_OP_bra:
      {
	int offset;

	if (val1->val_class == dw_val_class_loc)
	  /* The branch operand is relative to the end of this 3-byte
	     instruction (1 opcode byte + 2 operand bytes); the
	     dw_loc_addr offsets were filled in by size_of_locs.  */
	  offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
	else
	  abort ();

	dw2_asm_output_data (2, offset, NULL);
      }
      break;
#else
    case DW_OP_addr:
    case DW_OP_const2u:
    case DW_OP_const2s:
    case DW_OP_const4u:
    case DW_OP_const4s:
    case DW_OP_const8u:
    case DW_OP_const8s:
    case DW_OP_skip:
    case DW_OP_bra:
      /* We currently don't make any attempt to make sure these are
	 aligned properly like we do for the main unwind info, so
	 don't support emitting things larger than a byte if we're
	 only doing unwinding.  (abort does not return, so no
	 fall-through into the cases below can occur.)  */
      abort ();
#endif
    case DW_OP_const1u:
    case DW_OP_const1s:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_constu:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_consts:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_pick:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case DW_OP_plus_uconst:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_breg0:
    case DW_OP_breg1:
    case DW_OP_breg2:
    case DW_OP_breg3:
    case DW_OP_breg4:
    case DW_OP_breg5:
    case DW_OP_breg6:
    case DW_OP_breg7:
    case DW_OP_breg8:
    case DW_OP_breg9:
    case DW_OP_breg10:
    case DW_OP_breg11:
    case DW_OP_breg12:
    case DW_OP_breg13:
    case DW_OP_breg14:
    case DW_OP_breg15:
    case DW_OP_breg16:
    case DW_OP_breg17:
    case DW_OP_breg18:
    case DW_OP_breg19:
    case DW_OP_breg20:
    case DW_OP_breg21:
    case DW_OP_breg22:
    case DW_OP_breg23:
    case DW_OP_breg24:
    case DW_OP_breg25:
    case DW_OP_breg26:
    case DW_OP_breg27:
    case DW_OP_breg28:
    case DW_OP_breg29:
    case DW_OP_breg30:
    case DW_OP_breg31:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_regx:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_fbreg:
      dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
      break;
    case DW_OP_bregx:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
      break;
    case DW_OP_piece:
      dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
      break;
    case DW_OP_deref_size:
    case DW_OP_xderef_size:
      dw2_asm_output_data (1, val1->v.val_int, NULL);
      break;
    case INTERNAL_DW_OP_tls_addr:
      /* Emitted as DW_OP_addr with a dtp-relative relocation; only
	 targets defining ASM_OUTPUT_DWARF_DTPREL can produce it.  */
#ifdef ASM_OUTPUT_DWARF_DTPREL
      ASM_OUTPUT_DWARF_DTPREL (asm_out_file, DWARF2_ADDR_SIZE,
			       val1->v.val_addr);
      fputc ('\n', asm_out_file);
#else
      abort ();
#endif
      break;
    default:
      /* Other codes have no operands.  */
      break;
    }
}
/* Output a sequence of location operations: for each descriptor, one
   opcode byte followed by its operands.  */

static void
output_loc_sequence (loc)
     dw_loc_descr_ref loc;
{
  while (loc != NULL)
    {
      /* The opcode byte, annotated with its name.  */
      dw2_asm_output_data (1, loc->dw_loc_opc,
			   "%s", dwarf_stack_op_name (loc->dw_loc_opc));

      /* Then the operand(s), if any.  */
      output_loc_operands (loc);

      loc = loc->dw_loc_next;
    }
}
/* This routine will generate the correct assembly data for a location
   description based on a cfi entry with a complex address.  */

static void
output_cfa_loc (cfi)
     dw_cfi_ref cfi;
{
  dw_loc_descr_ref expr = cfi->dw_cfi_oprnd1.dw_cfi_loc;

  /* A DWARF expression block is emitted as a uleb128 length followed
     by the operation stream itself.  (size_of_locs also fills in the
     per-op offsets needed by branch operands.)  */
  dw2_asm_output_data_uleb128 (size_of_locs (expr), NULL);
  output_loc_sequence (expr);
}
/* This function builds a dwarf location descriptor sequence from
   a dw_cfa_location.  Only an indirect CFA is supported here.  */

static struct dw_loc_descr_struct *
build_cfa_loc (cfa)
     dw_cfa_location *cfa;
{
  struct dw_loc_descr_struct *head;
  struct dw_loc_descr_struct *tmp;

  if (cfa->indirect == 0)
    abort ();

  /* Start the expression with the base register: the short one-byte
     forms cover registers 0-31, larger numbers need the "x" forms.  */
  if (cfa->base_offset)
    {
      if (cfa->reg <= 31)
	head = new_loc_descr (DW_OP_breg0 + cfa->reg, cfa->base_offset, 0);
      else
	head = new_loc_descr (DW_OP_bregx, cfa->reg, cfa->base_offset);
    }
  else if (cfa->reg <= 31)
    head = new_loc_descr (DW_OP_reg0 + cfa->reg, 0, 0);
  else
    head = new_loc_descr (DW_OP_regx, cfa->reg, 0);

  /* Mark the first operand as a (signed) constant.  */
  head->dw_loc_oprnd1.val_class = dw_val_class_const;

  /* Dereference the base, then add the offset, if any.  */
  tmp = new_loc_descr (DW_OP_deref, 0, 0);
  add_loc_descr (&head, tmp);
  if (cfa->offset != 0)
    add_loc_descr (&head, new_loc_descr (DW_OP_plus_uconst, cfa->offset, 0));

  return head;
}
/* This function fills in aa dw_cfa_location structure from a dwarf location
descriptor sequence. */
static void
get_cfa_from_loc_descr (cfa, loc)
dw_cfa_location *cfa;
struct dw_loc_descr_struct *loc;
{
struct dw_loc_descr_struct *ptr;
cfa->offset = 0;
cfa->base_offset = 0;
cfa->indirect = 0;
cfa->reg = -1;
for (ptr = loc; ptr != NULL; ptr = ptr->dw_loc_next)
{
enum dwarf_location_atom op = ptr->dw_loc_opc;
switch (op)
{
case DW_OP_reg0:
case DW_OP_reg1:
case DW_OP_reg2:
case DW_OP_reg3:
case DW_OP_reg4:
case DW_OP_reg5:
case DW_OP_reg6:
case DW_OP_reg7:
case DW_OP_reg8:
case DW_OP_reg9:
case DW_OP_reg10:
case DW_OP_reg11:
case DW_OP_reg12:
case DW_OP_reg13:
case DW_OP_reg14:
case DW_OP_reg15:
case DW_OP_reg16:
case DW_OP_reg17:
case DW_OP_reg18:
case DW_OP_reg19:
case DW_OP_reg20:
case DW_OP_reg21:
case DW_OP_reg22:
case DW_OP_reg23:
case DW_OP_reg24:
case DW_OP_reg25:
case DW_OP_reg26:
case DW_OP_reg27:
case DW_OP_reg28:
case DW_OP_reg29:
case DW_OP_reg30:
case DW_OP_reg31:
cfa->reg = op - DW_OP_reg0;
break;
case DW_OP_regx:
cfa->reg = ptr->dw_loc_oprnd1.v.val_int;
break;
case DW_OP_breg0:
case DW_OP_breg1:
case DW_OP_breg2:
case DW_OP_breg3:
case DW_OP_breg4:
case DW_OP_breg5:
case DW_OP_breg6:
case DW_OP_breg7:
case DW_OP_breg8:
case DW_OP_breg9:
case DW_OP_breg10:
case DW_OP_breg11:
case DW_OP_breg12:
case DW_OP_breg13:
case DW_OP_breg14:
case DW_OP_breg15:
case DW_OP_breg16:
case DW_OP_breg17:
case DW_OP_breg18:
case DW_OP_breg19:
case DW_OP_breg20:
case DW_OP_breg21:
case DW_OP_breg22:
case DW_OP_breg23:
case DW_OP_breg24:
case DW_OP_breg25:
case DW_OP_breg26:
case DW_OP_breg27:
case DW_OP_breg28:
case DW_OP_breg29:
case DW_OP_breg30:
case DW_OP_breg31:
cfa->reg = op - DW_OP_breg0;
cfa->base_offset = ptr->dw_loc_oprnd1.v.val_int;
break;
case DW_OP_bregx:
cfa->reg = ptr->dw_loc_oprnd1.v.val_int;
cfa->base_offset = ptr->dw_loc_oprnd2.v.val_int;
break;
case DW_OP_deref:
cfa->indirect = 1;
break;
case DW_OP_plus_uconst:
cfa->offset = ptr->dw_loc_oprnd1.v.val_unsigned;
break;
default:
internal_error ("DW_LOC_OP %s not implemented\n",
dwarf_stack_op_name (ptr->dw_loc_opc));
}
}
}
#endif /* .debug_frame support */
/* And now, the support for symbolic debugging information. */
#ifdef DWARF2_DEBUGGING_INFO
/* .debug_str support. */
/* .debug_str support.  */
static hashnode indirect_string_alloc	PARAMS ((hash_table *));
static int output_indirect_string	PARAMS ((struct cpp_reader *,
						 hashnode, const PTR));

/* Debug-hook entry points and helpers defined later in this file and
   installed in dwarf2_debug_hooks below.  */
static void dwarf2out_init		PARAMS ((const char *));
static void dwarf2out_finish		PARAMS ((const char *));
static void dwarf2out_define		PARAMS ((unsigned int, const char *));
static void dwarf2out_undef		PARAMS ((unsigned int, const char *));
static void dwarf2out_start_source_file	PARAMS ((unsigned, const char *));
static void dwarf2out_end_source_file	PARAMS ((unsigned));
static void dwarf2out_begin_block	PARAMS ((unsigned, unsigned));
static void dwarf2out_end_block		PARAMS ((unsigned, unsigned));
static bool dwarf2out_ignore_block	PARAMS ((tree));
static void dwarf2out_global_decl	PARAMS ((tree));
static void dwarf2out_abstract_function	PARAMS ((tree));
/* The debug hooks structure.  Entries are positional; the comments
   name the gcc_debug_hooks member each initializer fills.  */

const struct gcc_debug_hooks dwarf2_debug_hooks =
{
  dwarf2out_init,		/* init */
  dwarf2out_finish,		/* finish */
  dwarf2out_define,		/* define */
  dwarf2out_undef,		/* undef */
  dwarf2out_start_source_file,	/* start_source_file */
  dwarf2out_end_source_file,	/* end_source_file */
  dwarf2out_begin_block,	/* begin_block */
  dwarf2out_end_block,		/* end_block */
  dwarf2out_ignore_block,	/* ignore_block */
  dwarf2out_source_line,	/* source_line */
  dwarf2out_begin_prologue,	/* begin_prologue */
  debug_nothing_int_charstar,	/* end_prologue */
  dwarf2out_end_epilogue,	/* end_epilogue */
  debug_nothing_tree,		/* begin_function */
  debug_nothing_int,		/* end_function */
  dwarf2out_decl,		/* function_decl */
  dwarf2out_global_decl,	/* global_decl */
  debug_nothing_tree,		/* deferred_inline_function */
  /* The DWARF 2 backend tries to reduce debugging bloat by not
     emitting the abstract description of inline functions until
     something tries to reference them.  */
  dwarf2out_abstract_function,	/* outlining_inline_function */
  debug_nothing_rtx		/* label */
};
/* NOTE: In the comments in this file, many references are made to
"Debugging Information Entries". This term is abbreviated as `DIE'
throughout the remainder of this file. */
/* An internal representation of the DWARF output is built, and then
walked to generate the DWARF debugging info. The walk of the internal
representation is done after the entire program has been compiled.
The types below are used to describe the internal representation. */
/* Various DIE's use offsets relative to the beginning of the
   .debug_info section to refer to each other.  */
typedef long int dw_offset;

/* Define pointer typedefs here, up front, to avoid circular
   dependencies among the structure definitions below.  */
typedef struct dw_attr_struct *dw_attr_ref;
typedef struct dw_line_info_struct *dw_line_info_ref;
typedef struct dw_separate_line_info_struct *dw_separate_line_info_ref;
typedef struct pubname_struct *pubname_ref;
typedef struct dw_ranges_struct *dw_ranges_ref;
/* Each entry in the line_info_table maintains the file and
   line number associated with the label generated for that
   entry.  The label gives the PC value associated with
   the line number entry.  */
typedef struct dw_line_info_struct
{
  unsigned long dw_file_num;	/* Index into the file table.  */
  unsigned long dw_line_num;	/* Source line number.  */
}
dw_line_info_entry;
/* Line information for functions in separate sections; each one gets its
   own sequence.  */
typedef struct dw_separate_line_info_struct
{
  unsigned long dw_file_num;	/* Index into the file table.  */
  unsigned long dw_line_num;	/* Source line number.  */
  unsigned long function;	/* Identifies which function's sequence
				   this entry belongs to.  */
}
dw_separate_line_info_entry;
/* Each DIE attribute has a field specifying the attribute kind,
   a link to the next attribute in the chain, and an attribute value.
   Attributes are typically linked below the DIE they modify.  */

typedef struct dw_attr_struct
{
  enum dwarf_attribute dw_attr;	/* The DW_AT_* code.  */
  dw_attr_ref dw_attr_next;	/* Next attribute in the chain.  */
  dw_val_node dw_attr_val;	/* The attribute's value.  */
}
dw_attr_node;
/* The Debugging Information Entry (DIE) structure.  DIEs form a tree
   via the parent/child/sibling links.  */

typedef struct die_struct
{
  enum dwarf_tag die_tag;	/* The DW_TAG_* code for this DIE.  */
  char *die_symbol;		/* Symbol naming this DIE, if any.  */
  dw_attr_ref die_attr;		/* Head of the attribute chain.  */
  dw_die_ref die_parent;	/* Enclosing DIE.  */
  dw_die_ref die_child;		/* Child DIE chain.  */
  dw_die_ref die_sib;		/* Sibling chain.  */
  dw_offset die_offset;		/* Offset within .debug_info.  */
  unsigned long die_abbrev;	/* Abbreviation table entry number.  */
  int die_mark;			/* Scratch flag for traversals.  */
}
die_node;
/* The pubname structure: a DIE paired with the public name by which
   it is listed.  */

typedef struct pubname_struct
{
  dw_die_ref die;	/* The DIE described by this entry.  */
  char *name;		/* The public name.  */
}
pubname_entry;
/* One entry of the ranges table; identifies a block by number.  */
struct dw_ranges_struct
{
  int block_num;
};
/* The limbo die list structure: a DIE whose parent is not yet known,
   together with the tree node it was created for (used later when
   finding it a home).  */
typedef struct limbo_die_struct
{
  dw_die_ref die;		/* The parentless DIE.  */
  tree created_for;		/* Tree node the DIE was generated for.  */
  struct limbo_die_struct *next;	/* Next entry in the list.  */
}
limbo_die_node;
/* How to start an assembler comment.  */
#ifndef ASM_COMMENT_START
#define ASM_COMMENT_START ";#"
#endif

/* Define a macro which returns nonzero for a TYPE_DECL which was
   implicitly generated for a tagged type.

   Note that unlike the gcc front end (which generates a NULL named
   TYPE_DECL node for each complete tagged type, each array type, and
   each function type node created) the g++ front end generates a
   _named_ TYPE_DECL node for each tagged type node created.
   These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
   generate a DW_TAG_typedef DIE for them.  */
#define TYPE_DECL_IS_STUB(decl)				\
  (DECL_NAME (decl) == NULL_TREE			\
   || (DECL_ARTIFICIAL (decl)				\
       && is_tagged_type (TREE_TYPE (decl))		\
       && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl)))	\
	   /* This is necessary for stub decls that	\
	      appear in nested inline functions.  */	\
	   || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE	\
	       && (decl_ultimate_origin (decl)		\
		   == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
/* Information concerning the compilation unit's programming
   language, and compiler version.  */

/* Fixed size portion of the DWARF compilation unit header.  */
#define DWARF_COMPILE_UNIT_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 3)

/* Fixed size portion of public names info.  */
#define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)

/* Fixed size portion of the address range info.  */
#define DWARF_ARANGES_HEADER_SIZE					\
  (DWARF_ROUND (2 * DWARF_OFFSET_SIZE + 4, DWARF2_ADDR_SIZE * 2)	\
   - DWARF_OFFSET_SIZE)

/* Size of padding portion in the address range info.  It must be
   aligned to twice the pointer size.  */
#define DWARF_ARANGES_PAD_SIZE						\
  (DWARF_ROUND (2 * DWARF_OFFSET_SIZE + 4, DWARF2_ADDR_SIZE * 2)	\
   - (2 * DWARF_OFFSET_SIZE + 4))

/* Use assembler line directives if available.  */
#ifndef DWARF2_ASM_LINE_DEBUG_INFO
#ifdef HAVE_AS_DWARF2_DEBUG_LINE
#define DWARF2_ASM_LINE_DEBUG_INFO 1
#else
#define DWARF2_ASM_LINE_DEBUG_INFO 0
#endif
#endif

/* Minimum line offset in a special line info. opcode.
   This value was chosen to give a reasonable range of values.  */
#define DWARF_LINE_BASE -10

/* First special line opcode - leave room for the standard opcodes.  */
#define DWARF_LINE_OPCODE_BASE 10

/* Range of line offsets in a special line info. opcode.  */
#define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)

/* Flag that indicates the initial value of the is_stmt_start flag.
   In the present implementation, we do not mark any lines as
   the beginning of a source statement, because that information
   is not made available by the GCC front-end.  */
#define DWARF_LINE_DEFAULT_IS_STMT_START 1
/* This location is used by calc_die_sizes() to keep track
   the offset of each DIE within the .debug_info section.  */
static unsigned long next_die_offset;

/* Record the root of the DIE's built for the current compilation unit.  */
static dw_die_ref comp_unit_die;

/* We need special handling in dwarf2out_start_source_file if it is
   first one.  */
static int is_main_source;

/* A list of DIEs with a NULL parent waiting to be relocated.  */
static limbo_die_node *limbo_die_list = 0;

/* Structure used by lookup_filename to manage sets of filenames.  */
struct file_table
{
  char **table;			/* The filename strings themselves.  */
  unsigned allocated;		/* Number of slots allocated.  */
  unsigned in_use;		/* Number of slots in use.  */
  unsigned last_lookup_index;	/* Cache of the most recent lookup.  */
};

/* Size (in elements) of increments by which we may expand the filename
   table.  */
#define FILE_TABLE_INCREMENT 64

/* Filenames referenced by this compilation unit.  */
static struct file_table file_table;

/* Local pointer to the name of the main input file.  Initialized in
   dwarf2out_init.  */
static const char *primary_filename;

/* A pointer to the base of a table of references to DIE's that describe
   declarations.  The table is indexed by DECL_UID() which is a unique
   number identifying each decl.  */
static dw_die_ref *decl_die_table;

/* Number of elements currently allocated for the decl_die_table.  */
static unsigned decl_die_table_allocated;

/* Number of elements in decl_die_table currently in use.  */
static unsigned decl_die_table_in_use;

/* Size (in elements) of increments by which we may expand the
   decl_die_table.  */
#define DECL_DIE_TABLE_INCREMENT 256

/* A pointer to the base of a list of references to DIE's that
   are uniquely identified by their tag, presence/absence of
   children DIE's, and list of attribute/value pairs.  */
static dw_die_ref *abbrev_die_table;

/* Number of elements currently allocated for abbrev_die_table.  */
static unsigned abbrev_die_table_allocated;

/* Number of elements in abbrev_die_table currently in use.  */
static unsigned abbrev_die_table_in_use;

/* Size (in elements) of increments by which we may expand the
   abbrev_die_table.  */
#define ABBREV_DIE_TABLE_INCREMENT 256

/* A pointer to the base of a table that contains line information
   for each source code line in .text in the compilation unit.  */
static dw_line_info_ref line_info_table;

/* Number of elements currently allocated for line_info_table.  */
static unsigned line_info_table_allocated;

/* Number of elements in separate_line_info_table currently in use.  */
static unsigned separate_line_info_table_in_use;

/* A pointer to the base of a table that contains line information
   for each source code line outside of .text in the compilation unit.  */
static dw_separate_line_info_ref separate_line_info_table;

/* Number of elements currently allocated for separate_line_info_table.  */
static unsigned separate_line_info_table_allocated;

/* Number of elements in line_info_table currently in use.  */
static unsigned line_info_table_in_use;

/* Size (in elements) of increments by which we may expand the
   line_info_table.  */
#define LINE_INFO_TABLE_INCREMENT 1024

/* A pointer to the base of a table that contains a list of publicly
   accessible names.  */
static pubname_ref pubname_table;

/* Number of elements currently allocated for pubname_table.  */
static unsigned pubname_table_allocated;

/* Number of elements in pubname_table currently in use.  */
static unsigned pubname_table_in_use;

/* Size (in elements) of increments by which we may expand the
   pubname_table.  */
#define PUBNAME_TABLE_INCREMENT 64

/* Array of dies for which we should generate .debug_arange info.  */
static dw_die_ref *arange_table;

/* Number of elements currently allocated for arange_table.  */
static unsigned arange_table_allocated;

/* Number of elements in arange_table currently in use.  */
static unsigned arange_table_in_use;

/* Size (in elements) of increments by which we may expand the
   arange_table.  */
#define ARANGE_TABLE_INCREMENT 64

/* Array of dies for which we should generate .debug_ranges info.  */
static dw_ranges_ref ranges_table;

/* Number of elements currently allocated for ranges_table.  */
static unsigned ranges_table_allocated;

/* Number of elements in ranges_table currently in use.  */
static unsigned ranges_table_in_use;
/* Size (in elements) of increments by which we may expand the
ranges_table. */
#define RANGES_TABLE_INCREMENT 64
/* Whether we have location lists that need outputting */
static unsigned have_location_lists;
/* Record whether the function being analyzed contains inlined functions. */
static int current_function_has_inlines;
/* Forward declarations for functions defined in this file. */
static int is_pseudo_reg PARAMS ((rtx));
static tree type_main_variant PARAMS ((tree));
static int is_tagged_type PARAMS ((tree));
static const char *dwarf_tag_name PARAMS ((unsigned));
static const char *dwarf_attr_name PARAMS ((unsigned));
static const char *dwarf_form_name PARAMS ((unsigned));
static tree decl_ultimate_origin PARAMS ((tree));
static tree block_ultimate_origin PARAMS ((tree));
static tree decl_class_context PARAMS ((tree));
static void add_dwarf_attr PARAMS ((dw_die_ref, dw_attr_ref));
static inline dw_val_class AT_class PARAMS ((dw_attr_ref));
static void add_AT_flag PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned));
static inline unsigned AT_flag PARAMS ((dw_attr_ref));
static void add_AT_int PARAMS ((dw_die_ref,
enum dwarf_attribute, long));
static inline long int AT_int PARAMS ((dw_attr_ref));
static void add_AT_unsigned PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned long));
static inline unsigned long AT_unsigned PARAMS ((dw_attr_ref));
static void add_AT_long_long PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned long,
unsigned long));
static void add_AT_float PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned, long *));
static void add_AT_string PARAMS ((dw_die_ref,
enum dwarf_attribute,
const char *));
static inline const char *AT_string PARAMS ((dw_attr_ref));
static int AT_string_form PARAMS ((dw_attr_ref));
static void add_AT_die_ref PARAMS ((dw_die_ref,
enum dwarf_attribute,
dw_die_ref));
static inline dw_die_ref AT_ref PARAMS ((dw_attr_ref));
static inline int AT_ref_external PARAMS ((dw_attr_ref));
static inline void set_AT_ref_external PARAMS ((dw_attr_ref, int));
static void add_AT_fde_ref PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned));
static void add_AT_loc PARAMS ((dw_die_ref,
enum dwarf_attribute,
dw_loc_descr_ref));
static inline dw_loc_descr_ref AT_loc PARAMS ((dw_attr_ref));
static void add_AT_loc_list PARAMS ((dw_die_ref,
enum dwarf_attribute,
dw_loc_list_ref));
static inline dw_loc_list_ref AT_loc_list PARAMS ((dw_attr_ref));
static void add_AT_addr PARAMS ((dw_die_ref,
enum dwarf_attribute,
rtx));
static inline rtx AT_addr PARAMS ((dw_attr_ref));
static void add_AT_lbl_id PARAMS ((dw_die_ref,
enum dwarf_attribute,
const char *));
static void add_AT_lbl_offset PARAMS ((dw_die_ref,
enum dwarf_attribute,
const char *));
static void add_AT_offset PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned long));
static void add_AT_range_list PARAMS ((dw_die_ref,
enum dwarf_attribute,
unsigned long));
static inline const char *AT_lbl PARAMS ((dw_attr_ref));
static dw_attr_ref get_AT PARAMS ((dw_die_ref,
enum dwarf_attribute));
static const char *get_AT_low_pc PARAMS ((dw_die_ref));
static const char *get_AT_hi_pc PARAMS ((dw_die_ref));
static const char *get_AT_string PARAMS ((dw_die_ref,
enum dwarf_attribute));
static int get_AT_flag PARAMS ((dw_die_ref,
enum dwarf_attribute));
static unsigned get_AT_unsigned PARAMS ((dw_die_ref,
enum dwarf_attribute));
static inline dw_die_ref get_AT_ref PARAMS ((dw_die_ref,
enum dwarf_attribute));
static int is_c_family PARAMS ((void));
static int is_cxx PARAMS ((void));
static int is_java PARAMS ((void));
static int is_fortran PARAMS ((void));
static void remove_AT PARAMS ((dw_die_ref,
enum dwarf_attribute));
static inline void free_die PARAMS ((dw_die_ref));
static void remove_children PARAMS ((dw_die_ref));
static void add_child_die PARAMS ((dw_die_ref, dw_die_ref));
static dw_die_ref new_die PARAMS ((enum dwarf_tag, dw_die_ref,
tree));
static dw_die_ref lookup_type_die PARAMS ((tree));
static void equate_type_number_to_die PARAMS ((tree, dw_die_ref));
static dw_die_ref lookup_decl_die PARAMS ((tree));
static void equate_decl_number_to_die PARAMS ((tree, dw_die_ref));
static void print_spaces PARAMS ((FILE *));
static void print_die PARAMS ((dw_die_ref, FILE *));
static void print_dwarf_line_table PARAMS ((FILE *));
static void reverse_die_lists PARAMS ((dw_die_ref));
static void reverse_all_dies PARAMS ((dw_die_ref));
static dw_die_ref push_new_compile_unit PARAMS ((dw_die_ref, dw_die_ref));
static dw_die_ref pop_compile_unit PARAMS ((dw_die_ref));
static void loc_checksum PARAMS ((dw_loc_descr_ref,
struct md5_ctx *));
static void attr_checksum PARAMS ((dw_attr_ref,
struct md5_ctx *,
int *));
static void die_checksum PARAMS ((dw_die_ref,
struct md5_ctx *,
int *));
static int same_loc_p PARAMS ((dw_loc_descr_ref,
dw_loc_descr_ref, int *));
static int same_dw_val_p PARAMS ((dw_val_node *, dw_val_node *,
int *));
static int same_attr_p PARAMS ((dw_attr_ref, dw_attr_ref, int *));
static int same_die_p PARAMS ((dw_die_ref, dw_die_ref, int *));
static int same_die_p_wrap PARAMS ((dw_die_ref, dw_die_ref));
static void compute_section_prefix PARAMS ((dw_die_ref));
static int is_type_die PARAMS ((dw_die_ref));
static int is_comdat_die PARAMS ((dw_die_ref));
static int is_symbol_die PARAMS ((dw_die_ref));
static void assign_symbol_names PARAMS ((dw_die_ref));
static void break_out_includes PARAMS ((dw_die_ref));
static hashval_t htab_cu_hash PARAMS ((const void *));
static int htab_cu_eq PARAMS ((const void *, const void *));
static void htab_cu_del PARAMS ((void *));
static int check_duplicate_cu PARAMS ((dw_die_ref, htab_t, unsigned *));
static void record_comdat_symbol_number PARAMS ((dw_die_ref, htab_t, unsigned));
static void add_sibling_attributes PARAMS ((dw_die_ref));
static void build_abbrev_table PARAMS ((dw_die_ref));
static void output_location_lists PARAMS ((dw_die_ref));
static int constant_size PARAMS ((long unsigned));
static unsigned long size_of_die PARAMS ((dw_die_ref));
static void calc_die_sizes PARAMS ((dw_die_ref));
static void mark_dies PARAMS ((dw_die_ref));
static void unmark_dies PARAMS ((dw_die_ref));
static void unmark_all_dies PARAMS ((dw_die_ref));
static unsigned long size_of_pubnames PARAMS ((void));
static unsigned long size_of_aranges PARAMS ((void));
static enum dwarf_form value_format PARAMS ((dw_attr_ref));
static void output_value_format PARAMS ((dw_attr_ref));
static void output_abbrev_section PARAMS ((void));
static void output_die_symbol PARAMS ((dw_die_ref));
static void output_die PARAMS ((dw_die_ref));
static void output_compilation_unit_header PARAMS ((void));
static void output_comp_unit PARAMS ((dw_die_ref, int));
static const char *dwarf2_name PARAMS ((tree, int));
static void add_pubname PARAMS ((tree, dw_die_ref));
static void output_pubnames PARAMS ((void));
static void add_arange PARAMS ((tree, dw_die_ref));
static void output_aranges PARAMS ((void));
static unsigned int add_ranges PARAMS ((tree));
static void output_ranges PARAMS ((void));
static void output_line_info PARAMS ((void));
static void output_file_names PARAMS ((void));
static dw_die_ref base_type_die PARAMS ((tree));
static tree root_type PARAMS ((tree));
static int is_base_type PARAMS ((tree));
static dw_die_ref modified_type_die PARAMS ((tree, int, int, dw_die_ref));
static int type_is_enum PARAMS ((tree));
static unsigned int reg_number PARAMS ((rtx));
static dw_loc_descr_ref reg_loc_descriptor PARAMS ((rtx));
static dw_loc_descr_ref int_loc_descriptor PARAMS ((HOST_WIDE_INT));
static dw_loc_descr_ref based_loc_descr PARAMS ((unsigned, long));
static int is_based_loc PARAMS ((rtx));
static dw_loc_descr_ref mem_loc_descriptor PARAMS ((rtx, enum machine_mode mode));
static dw_loc_descr_ref concat_loc_descriptor PARAMS ((rtx, rtx));
static dw_loc_descr_ref loc_descriptor PARAMS ((rtx));
static dw_loc_descr_ref loc_descriptor_from_tree PARAMS ((tree, int));
static HOST_WIDE_INT ceiling PARAMS ((HOST_WIDE_INT, unsigned int));
static tree field_type PARAMS ((tree));
static unsigned int simple_type_align_in_bits PARAMS ((tree));
static unsigned int simple_decl_align_in_bits PARAMS ((tree));
static unsigned HOST_WIDE_INT simple_type_size_in_bits PARAMS ((tree));
static HOST_WIDE_INT field_byte_offset PARAMS ((tree));
static void add_AT_location_description PARAMS ((dw_die_ref,
enum dwarf_attribute,
dw_loc_descr_ref));
static void add_data_member_location_attribute PARAMS ((dw_die_ref, tree));
static void add_const_value_attribute PARAMS ((dw_die_ref, rtx));
static rtx rtl_for_decl_location PARAMS ((tree));
static void add_location_or_const_value_attribute PARAMS ((dw_die_ref, tree));
static void tree_add_const_value_attribute PARAMS ((dw_die_ref, tree));
static void add_name_attribute PARAMS ((dw_die_ref, const char *));
static void add_bound_info PARAMS ((dw_die_ref,
enum dwarf_attribute, tree));
static void add_subscript_info PARAMS ((dw_die_ref, tree));
static void add_byte_size_attribute PARAMS ((dw_die_ref, tree));
static void add_bit_offset_attribute PARAMS ((dw_die_ref, tree));
static void add_bit_size_attribute PARAMS ((dw_die_ref, tree));
static void add_prototyped_attribute PARAMS ((dw_die_ref, tree));
static void add_abstract_origin_attribute PARAMS ((dw_die_ref, tree));
static void add_pure_or_virtual_attribute PARAMS ((dw_die_ref, tree));
static void add_src_coords_attributes PARAMS ((dw_die_ref, tree));
static void add_name_and_src_coords_attributes PARAMS ((dw_die_ref, tree));
static void push_decl_scope PARAMS ((tree));
static void pop_decl_scope PARAMS ((void));
static dw_die_ref scope_die_for PARAMS ((tree, dw_die_ref));
static inline int local_scope_p PARAMS ((dw_die_ref));
static inline int class_scope_p PARAMS ((dw_die_ref));
static void add_type_attribute PARAMS ((dw_die_ref, tree, int, int,
dw_die_ref));
static const char *type_tag PARAMS ((tree));
static tree member_declared_type PARAMS ((tree));
static void gen_array_type_die PARAMS ((tree, dw_die_ref));
static void gen_set_type_die PARAMS ((tree, dw_die_ref));
static void gen_inlined_enumeration_type_die PARAMS ((tree, dw_die_ref));
static void gen_inlined_structure_type_die PARAMS ((tree, dw_die_ref));
static void gen_inlined_union_type_die PARAMS ((tree, dw_die_ref));
static void gen_enumeration_type_die PARAMS ((tree, dw_die_ref));
static dw_die_ref gen_formal_parameter_die PARAMS ((tree, dw_die_ref));
static void gen_unspecified_parameters_die PARAMS ((tree, dw_die_ref));
static void gen_formal_types_die PARAMS ((tree, dw_die_ref));
static void gen_subprogram_die PARAMS ((tree, dw_die_ref));
static void gen_variable_die PARAMS ((tree, dw_die_ref));
static void gen_label_die PARAMS ((tree, dw_die_ref));
static void gen_lexical_block_die PARAMS ((tree, dw_die_ref, int));
static void gen_inlined_subroutine_die PARAMS ((tree, dw_die_ref, int));
static void gen_field_die PARAMS ((tree, dw_die_ref));
static void gen_ptr_to_mbr_type_die PARAMS ((tree, dw_die_ref));
static dw_die_ref gen_compile_unit_die PARAMS ((const char *));
static void gen_string_type_die PARAMS ((tree, dw_die_ref));
static void gen_inheritance_die PARAMS ((tree, dw_die_ref));
static void gen_member_die PARAMS ((tree, dw_die_ref));
static void gen_struct_or_union_type_die PARAMS ((tree, dw_die_ref));
static void gen_subroutine_type_die PARAMS ((tree, dw_die_ref));
static void gen_typedef_die PARAMS ((tree, dw_die_ref));
static void gen_type_die PARAMS ((tree, dw_die_ref));
static void gen_tagged_type_instantiation_die PARAMS ((tree, dw_die_ref));
static void gen_block_die PARAMS ((tree, dw_die_ref, int));
static void decls_for_scope PARAMS ((tree, dw_die_ref, int));
static int is_redundant_typedef PARAMS ((tree));
static void gen_decl_die PARAMS ((tree, dw_die_ref));
static unsigned lookup_filename PARAMS ((const char *));
static void init_file_table PARAMS ((void));
static void retry_incomplete_types PARAMS ((void));
static void gen_type_die_for_member PARAMS ((tree, tree, dw_die_ref));
static void splice_child_die PARAMS ((dw_die_ref, dw_die_ref));
static int file_info_cmp PARAMS ((const void *, const void *));
static dw_loc_list_ref new_loc_list PARAMS ((dw_loc_descr_ref,
const char *, const char *,
const char *, unsigned));
static void add_loc_descr_to_loc_list PARAMS ((dw_loc_list_ref *,
dw_loc_descr_ref,
const char *, const char *, const char *));
static void output_loc_list PARAMS ((dw_loc_list_ref));
static char *gen_internal_sym PARAMS ((const char *));
static void mark_limbo_die_list PARAMS ((void *));
/* Section names used to hold DWARF debugging information. */
#ifndef DEBUG_INFO_SECTION
#define DEBUG_INFO_SECTION ".debug_info"
#endif
#ifndef DEBUG_ABBREV_SECTION
#define DEBUG_ABBREV_SECTION ".debug_abbrev"
#endif
#ifndef DEBUG_ARANGES_SECTION
#define DEBUG_ARANGES_SECTION ".debug_aranges"
#endif
#ifndef DEBUG_MACINFO_SECTION
#define DEBUG_MACINFO_SECTION ".debug_macinfo"
#endif
#ifndef DEBUG_LINE_SECTION
#define DEBUG_LINE_SECTION ".debug_line"
#endif
#ifndef DEBUG_LOC_SECTION
#define DEBUG_LOC_SECTION ".debug_loc"
#endif
#ifndef DEBUG_PUBNAMES_SECTION
#define DEBUG_PUBNAMES_SECTION ".debug_pubnames"
#endif
#ifndef DEBUG_STR_SECTION
#define DEBUG_STR_SECTION ".debug_str"
#endif
#ifndef DEBUG_RANGES_SECTION
#define DEBUG_RANGES_SECTION ".debug_ranges"
#endif
/* Standard ELF section names for compiled code and data. */
#ifndef TEXT_SECTION_NAME
#define TEXT_SECTION_NAME ".text"
#endif
/* Section flags for .debug_str section. */
#ifdef HAVE_GAS_SHF_MERGE
#define DEBUG_STR_SECTION_FLAGS \
(SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1)
#else
#define DEBUG_STR_SECTION_FLAGS SECTION_DEBUG
#endif
/* Labels we insert at beginning sections we can reference instead of
the section names themselves. */
#ifndef TEXT_SECTION_LABEL
#define TEXT_SECTION_LABEL "Ltext"
#endif
#ifndef DEBUG_LINE_SECTION_LABEL
#define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
#endif
#ifndef DEBUG_INFO_SECTION_LABEL
#define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
#endif
#ifndef DEBUG_ABBREV_SECTION_LABEL
#define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
#endif
#ifndef DEBUG_LOC_SECTION_LABEL
#define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
#endif
#ifndef DEBUG_RANGES_SECTION_LABEL
#define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
#endif
#ifndef DEBUG_MACINFO_SECTION_LABEL
#define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
#endif
/* Definitions of defaults for formats and names of various special
   (artificial) labels which may be generated within this file (when the -g
   option is used and DWARF_DEBUGGING_INFO is in effect).
If necessary, these may be overridden from within the tm.h file, but
typically, overriding these defaults is unnecessary. */
static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
#ifndef TEXT_END_LABEL
#define TEXT_END_LABEL "Letext"
#endif
#ifndef BLOCK_BEGIN_LABEL
#define BLOCK_BEGIN_LABEL "LBB"
#endif
#ifndef BLOCK_END_LABEL
#define BLOCK_END_LABEL "LBE"
#endif
#ifndef LINE_CODE_LABEL
#define LINE_CODE_LABEL "LM"
#endif
#ifndef SEPARATE_LINE_CODE_LABEL
#define SEPARATE_LINE_CODE_LABEL "LSM"
#endif
/* We allow a language front-end to designate a function that is to be
   called to "demangle" any name before it is put into a DIE.  */
static const char *(*demangle_name_func) PARAMS ((const char *));
/* Register FUNC as the hook used to demangle a name before it is stored
   into a DIE; called by language front ends.  */
void
dwarf2out_set_demangle_name_func (func)
     const char *(*func) PARAMS ((const char *));
{
  demangle_name_func = func;
}
/* Return nonzero if the rtl node RTL refers to a pseudo register, either
   directly or through a SUBREG.  */
static inline int
is_pseudo_reg (rtl)
     rtx rtl;
{
  if (GET_CODE (rtl) == REG)
    return REGNO (rtl) >= FIRST_PSEUDO_REGISTER;
  if (GET_CODE (rtl) == SUBREG)
    return REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER;
  return 0;
}
/* Return a reference to a type, with its const and volatile qualifiers
   removed.  */
static inline tree
type_main_variant (type)
     tree type;
{
  tree variant = TYPE_MAIN_VARIANT (type);

  /* ??? There really should be only one main variant among any group of
     variants of a given type (and all of the MAIN_VARIANT values for all
     members of the group should point to that one type) but sometimes the
     C front-end messes this up for array types, so we work around that
     bug here by following the chain until it reaches a fixed point.  */
  if (TREE_CODE (variant) == ARRAY_TYPE)
    {
      tree next = TYPE_MAIN_VARIANT (variant);

      while (next != variant)
	{
	  variant = next;
	  next = TYPE_MAIN_VARIANT (variant);
	}
    }

  return variant;
}
/* Return nonzero if the given type node represents a tagged type, i.e. a
   struct, union, or enum.  */
static inline int
is_tagged_type (type)
     tree type;
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ENUMERAL_TYPE:
      return 1;
    default:
      return 0;
    }
}
/* Convert a DIE tag into its string name.  Tags we do not recognize map
   to "DW_TAG_<unknown>".  */
static const char *
dwarf_tag_name (tag)
     unsigned tag;
{
  /* Stringize each tag code so every name is spelled exactly once.  */
#define TAG_NAME_CASE(t) case t: return #t;
  switch (tag)
    {
      TAG_NAME_CASE (DW_TAG_padding)
      TAG_NAME_CASE (DW_TAG_array_type)
      TAG_NAME_CASE (DW_TAG_class_type)
      TAG_NAME_CASE (DW_TAG_entry_point)
      TAG_NAME_CASE (DW_TAG_enumeration_type)
      TAG_NAME_CASE (DW_TAG_formal_parameter)
      TAG_NAME_CASE (DW_TAG_imported_declaration)
      TAG_NAME_CASE (DW_TAG_label)
      TAG_NAME_CASE (DW_TAG_lexical_block)
      TAG_NAME_CASE (DW_TAG_member)
      TAG_NAME_CASE (DW_TAG_pointer_type)
      TAG_NAME_CASE (DW_TAG_reference_type)
      TAG_NAME_CASE (DW_TAG_compile_unit)
      TAG_NAME_CASE (DW_TAG_string_type)
      TAG_NAME_CASE (DW_TAG_structure_type)
      TAG_NAME_CASE (DW_TAG_subroutine_type)
      TAG_NAME_CASE (DW_TAG_typedef)
      TAG_NAME_CASE (DW_TAG_union_type)
      TAG_NAME_CASE (DW_TAG_unspecified_parameters)
      TAG_NAME_CASE (DW_TAG_variant)
      TAG_NAME_CASE (DW_TAG_common_block)
      TAG_NAME_CASE (DW_TAG_common_inclusion)
      TAG_NAME_CASE (DW_TAG_inheritance)
      TAG_NAME_CASE (DW_TAG_inlined_subroutine)
      TAG_NAME_CASE (DW_TAG_module)
      TAG_NAME_CASE (DW_TAG_ptr_to_member_type)
      TAG_NAME_CASE (DW_TAG_set_type)
      TAG_NAME_CASE (DW_TAG_subrange_type)
      TAG_NAME_CASE (DW_TAG_with_stmt)
      TAG_NAME_CASE (DW_TAG_access_declaration)
      TAG_NAME_CASE (DW_TAG_base_type)
      TAG_NAME_CASE (DW_TAG_catch_block)
      TAG_NAME_CASE (DW_TAG_const_type)
      TAG_NAME_CASE (DW_TAG_constant)
      TAG_NAME_CASE (DW_TAG_enumerator)
      TAG_NAME_CASE (DW_TAG_file_type)
      TAG_NAME_CASE (DW_TAG_friend)
      TAG_NAME_CASE (DW_TAG_namelist)
      TAG_NAME_CASE (DW_TAG_namelist_item)
      TAG_NAME_CASE (DW_TAG_packed_type)
      TAG_NAME_CASE (DW_TAG_subprogram)
      TAG_NAME_CASE (DW_TAG_template_type_param)
      TAG_NAME_CASE (DW_TAG_template_value_param)
      TAG_NAME_CASE (DW_TAG_thrown_type)
      TAG_NAME_CASE (DW_TAG_try_block)
      TAG_NAME_CASE (DW_TAG_variant_part)
      TAG_NAME_CASE (DW_TAG_variable)
      TAG_NAME_CASE (DW_TAG_volatile_type)
      TAG_NAME_CASE (DW_TAG_MIPS_loop)
      TAG_NAME_CASE (DW_TAG_format_label)
      TAG_NAME_CASE (DW_TAG_function_template)
      TAG_NAME_CASE (DW_TAG_class_template)
      TAG_NAME_CASE (DW_TAG_GNU_BINCL)
      TAG_NAME_CASE (DW_TAG_GNU_EINCL)
    default:
      return "DW_TAG_<unknown>";
    }
#undef TAG_NAME_CASE
}
/* Convert a DWARF attribute code into its string name. */
static const char *
dwarf_attr_name (attr)
unsigned attr;
{
switch (attr)
{
case DW_AT_sibling:
return "DW_AT_sibling";
case DW_AT_location:
return "DW_AT_location";
case DW_AT_name:
return "DW_AT_name";
case DW_AT_ordering:
return "DW_AT_ordering";
case DW_AT_subscr_data:
return "DW_AT_subscr_data";
case DW_AT_byte_size:
return "DW_AT_byte_size";
case DW_AT_bit_offset:
return "DW_AT_bit_offset";
case DW_AT_bit_size:
return "DW_AT_bit_size";
case DW_AT_element_list:
return "DW_AT_element_list";
case DW_AT_stmt_list:
return "DW_AT_stmt_list";
case DW_AT_low_pc:
return "DW_AT_low_pc";
case DW_AT_high_pc:
return "DW_AT_high_pc";
case DW_AT_language:
return "DW_AT_language";
case DW_AT_member:
return "DW_AT_member";
case DW_AT_discr:
return "DW_AT_discr";
case DW_AT_discr_value:
return "DW_AT_discr_value";
case DW_AT_visibility:
return "DW_AT_visibility";
case DW_AT_import:
return "DW_AT_import";
case DW_AT_string_length:
return "DW_AT_string_length";
case DW_AT_common_reference:
return "DW_AT_common_reference";
case DW_AT_comp_dir:
return "DW_AT_comp_dir";
case DW_AT_const_value:
return "DW_AT_const_value";
case DW_AT_containing_type:
return "DW_AT_containing_type";
case DW_AT_default_value:
return "DW_AT_default_value";
case DW_AT_inline:
return "DW_AT_inline";
case DW_AT_is_optional:
return "DW_AT_is_optional";
case DW_AT_lower_bound:
return "DW_AT_lower_bound";
case DW_AT_producer:
return "DW_AT_producer";
case DW_AT_prototyped:
return "DW_AT_prototyped";
case DW_AT_return_addr:
return "DW_AT_return_addr";
case DW_AT_start_scope:
return "DW_AT_start_scope";
case DW_AT_stride_size:
return "DW_AT_stride_size";
case DW_AT_upper_bound:
return "DW_AT_upper_bound";
case DW_AT_abstract_origin:
return "DW_AT_abstract_origin";
case DW_AT_accessibility:
return "DW_AT_accessibility";
case DW_AT_address_class:
return "DW_AT_address_class";
case DW_AT_artificial:
return "DW_AT_artificial";
case DW_AT_base_types:
return "DW_AT_base_types";
case DW_AT_calling_convention:
return "DW_AT_calling_convention";
case DW_AT_count:
return "DW_AT_count";
case DW_AT_data_member_location:
return "DW_AT_data_member_location";
case DW_AT_decl_column:
return "DW_AT_decl_column";
case DW_AT_decl_file:
return "DW_AT_decl_file";
case DW_AT_decl_line:
return "DW_AT_decl_line";
case DW_AT_declaration:
return "DW_AT_declaration";
case DW_AT_discr_list:
return "DW_AT_discr_list";
case DW_AT_encoding:
return "DW_AT_encoding";
case DW_AT_external:
return "DW_AT_external";
case DW_AT_frame_base:
return "DW_AT_frame_base";
case DW_AT_friend:
return "DW_AT_friend";
case DW_AT_identifier_case:
return "DW_AT_identifier_case";
case DW_AT_macro_info:
return "DW_AT_macro_info";
case DW_AT_namelist_items:
return "DW_AT_namelist_items";
case DW_AT_priority:
return "DW_AT_priority";
case DW_AT_segment:
return "DW_AT_segment";
case DW_AT_specification:
return "DW_AT_specification";
case DW_AT_static_link:
return "DW_AT_static_link";
case DW_AT_type:
return "DW_AT_type";
case DW_AT_use_location:
return "DW_AT_use_location";
case DW_AT_variable_parameter:
return "DW_AT_variable_parameter";
case DW_AT_virtuality:
return "DW_AT_virtuality";
case DW_AT_vtable_elem_location:
return "DW_AT_vtable_elem_location";
case DW_AT_allocated:
return "DW_AT_allocated";
case DW_AT_associated:
return "DW_AT_associated";
case DW_AT_data_location:
return "DW_AT_data_location";
case DW_AT_stride:
return "DW_AT_stride";
case DW_AT_entry_pc:
return "DW_AT_entry_pc";
case DW_AT_use_UTF8:
return "DW_AT_use_UTF8";
case DW_AT_extension:
return "DW_AT_extension";
case DW_AT_ranges:
return "DW_AT_ranges";
case DW_AT_trampoline:
return "DW_AT_trampoline";
case DW_AT_call_column:
return "DW_AT_call_column";
case DW_AT_call_file:
return "DW_AT_call_file";
case DW_AT_call_line:
return "DW_AT_call_line";
case DW_AT_MIPS_fde:
return "DW_AT_MIPS_fde";
case DW_AT_MIPS_loop_begin:
return "DW_AT_MIPS_loop_begin";
case DW_AT_MIPS_tail_loop_begin:
return "DW_AT_MIPS_tail_loop_begin";
case DW_AT_MIPS_epilog_begin:
return "DW_AT_MIPS_epilog_begin";
case DW_AT_MIPS_loop_unroll_factor:
return "DW_AT_MIPS_loop_unroll_factor";
case DW_AT_MIPS_software_pipeline_depth:
return "DW_AT_MIPS_software_pipeline_depth";
case DW_AT_MIPS_linkage_name:
return "DW_AT_MIPS_linkage_name";
case DW_AT_MIPS_stride:
return "DW_AT_MIPS_stride";
case DW_AT_MIPS_abstract_name:
return "DW_AT_MIPS_abstract_name";
case DW_AT_MIPS_clone_origin:
return "DW_AT_MIPS_clone_origin";
case DW_AT_MIPS_has_inlines:
return "DW_AT_MIPS_has_inlines";
case DW_AT_sf_names:
return "DW_AT_sf_names";
case DW_AT_src_info:
return "DW_AT_src_info";
case DW_AT_mac_info:
return "DW_AT_mac_info";
case DW_AT_src_coords:
return "DW_AT_src_coords";
case DW_AT_body_begin:
return "DW_AT_body_begin";
case DW_AT_body_end:
return "DW_AT_body_end";
case DW_AT_GNU_vector:
return "DW_AT_GNU_vector";
case DW_AT_VMS_rtnbeg_pd_address:
return "DW_AT_VMS_rtnbeg_pd_address";
default:
return "DW_AT_<unknown>";
}
}
/* Convert a DWARF value form code into its string name.  Forms we do not
   recognize map to "DW_FORM_<unknown>".  */
static const char *
dwarf_form_name (form)
     unsigned form;
{
  /* Stringize each form code so every name is spelled exactly once.  */
#define FORM_NAME_CASE(f) case f: return #f;
  switch (form)
    {
      FORM_NAME_CASE (DW_FORM_addr)
      FORM_NAME_CASE (DW_FORM_block2)
      FORM_NAME_CASE (DW_FORM_block4)
      FORM_NAME_CASE (DW_FORM_data2)
      FORM_NAME_CASE (DW_FORM_data4)
      FORM_NAME_CASE (DW_FORM_data8)
      FORM_NAME_CASE (DW_FORM_string)
      FORM_NAME_CASE (DW_FORM_block)
      FORM_NAME_CASE (DW_FORM_block1)
      FORM_NAME_CASE (DW_FORM_data1)
      FORM_NAME_CASE (DW_FORM_flag)
      FORM_NAME_CASE (DW_FORM_sdata)
      FORM_NAME_CASE (DW_FORM_strp)
      FORM_NAME_CASE (DW_FORM_udata)
      FORM_NAME_CASE (DW_FORM_ref_addr)
      FORM_NAME_CASE (DW_FORM_ref1)
      FORM_NAME_CASE (DW_FORM_ref2)
      FORM_NAME_CASE (DW_FORM_ref4)
      FORM_NAME_CASE (DW_FORM_ref8)
      FORM_NAME_CASE (DW_FORM_ref_udata)
      FORM_NAME_CASE (DW_FORM_indirect)
    default:
      return "DW_FORM_<unknown>";
    }
#undef FORM_NAME_CASE
}
/* Convert a DWARF type code into its string name. */
/* Determine the "ultimate origin" of a decl.  The decl may be an inlined
   instance of an inlined instance of a decl which is local to an inline
   function, so we have to trace all of the way back through the origin
   chain to find out what sort of node actually served as the original
   seed for the given block.  Return NULL_TREE for an abstract instance
   pointing at itself.  */
static tree
decl_ultimate_origin (decl)
     tree decl;
{
  tree origin = DECL_ABSTRACT_ORIGIN (decl);

  /* output_inline_function sets DECL_ABSTRACT_ORIGIN for all the nodes in
     the function to point to themselves; ignore that if we're trying to
     output the abstract instance of this function.  */
  if (DECL_ABSTRACT (decl) && origin == decl)
    return NULL_TREE;

#ifdef ENABLE_CHECKING
  /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the most
     distant ancestor, a second level of indirection should never occur.  */
  if (DECL_FROM_INLINE (DECL_ORIGIN (decl)))
    abort ();
#endif

  return origin;
}
/* Determine the "ultimate origin" of a block.  The block may be an inlined
   instance of an inlined instance of a block which is local to an inline
   function, so we have to trace all of the way back through the origin
   chain to find out what sort of node actually served as the original
   seed for the given block.  Return NULL_TREE when there is no origin, or
   when the block is its own abstract origin.  */
static tree
block_ultimate_origin (block)
     tree block;
{
  tree origin = BLOCK_ABSTRACT_ORIGIN (block);
  tree prev;

  /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the nodes in
     the function to point to themselves; ignore that if we're trying to
     output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && origin == block)
    return NULL_TREE;
  if (origin == NULL_TREE)
    return NULL_TREE;

  /* Walk the chain until it stops advancing or reaches a fixed point; the
     last node visited is the ultimate origin.  */
  do
    {
      prev = origin;
      origin = (TREE_CODE (prev) == BLOCK
		? BLOCK_ABSTRACT_ORIGIN (prev) : NULL);
    }
  while (origin != NULL && origin != prev);

  return prev;
}
/* Get the class to which DECL belongs, if any.  In g++, the DECL_CONTEXT
   of a virtual function may refer to a base class, so we check the 'this'
   parameter instead.  Returns NULL_TREE when the context is not a type.  */
static tree
decl_class_context (decl)
     tree decl;
{
  tree context;

  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
    /* Virtual member function: derive the class from the type of the
       'this' argument.  */
    context = TYPE_MAIN_VARIANT
      (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  if (context != NULL_TREE && !TYPE_P (context))
    return NULL_TREE;

  return context;
}
/* Add an attribute/value pair to a DIE.  We build the lists up in reverse
   addition order, and correct that in reverse_all_dies.  A null DIE or
   attribute is silently ignored.  */
static inline void
add_dwarf_attr (die, attr)
     dw_die_ref die;
     dw_attr_ref attr;
{
  if (die == NULL || attr == NULL)
    return;

  /* Push onto the front of the DIE's attribute list.  */
  attr->dw_attr_next = die->die_attr;
  die->die_attr = attr;
}
/* Return the value class of attribute A.  */
static inline dw_val_class
AT_class (a)
     dw_attr_ref a;
{
  return a->dw_attr_val.val_class;
}
/* Add a flag value attribute of kind ATTR_KIND with value FLAG to DIE.  */
static inline void
add_AT_flag (die, attr_kind, flag)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned flag;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.val_class = dw_val_class_flag;
  a->dw_attr_val.v.val_flag = flag;
  add_dwarf_attr (die, a);
}
static inline unsigned
AT_flag (a)
dw_attr_ref a;
{
if (a && AT_class (a) == dw_val_class_flag)
return a->dw_attr_val.v.val_flag;
abort ();
}
/* Add a signed integer attribute of kind ATTR_KIND with value INT_VAL
   to DIE.  */
static inline void
add_AT_int (die, attr_kind, int_val)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     long int int_val;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.val_class = dw_val_class_const;
  a->dw_attr_val.v.val_int = int_val;
  add_dwarf_attr (die, a);
}
static inline long int
AT_int (a)
dw_attr_ref a;
{
if (a && AT_class (a) == dw_val_class_const)
return a->dw_attr_val.v.val_int;
abort ();
}
/* Add an unsigned integer attribute of kind ATTR_KIND with value
   UNSIGNED_VAL to DIE.  */
static inline void
add_AT_unsigned (die, attr_kind, unsigned_val)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned long unsigned_val;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.val_class = dw_val_class_unsigned_const;
  a->dw_attr_val.v.val_unsigned = unsigned_val;
  add_dwarf_attr (die, a);
}
/* Return the unsigned integer stored in A, which must be of
   unsigned-constant class.  */

static inline unsigned long
AT_unsigned (a)
     dw_attr_ref a;
{
  if (a == NULL || AT_class (a) != dw_val_class_unsigned_const)
    abort ();

  return a->dw_attr_val.v.val_unsigned;
}
/* Add a double-word unsigned attribute of kind ATTR_KIND to DIE, with the
   high half VAL_HI and low half VAL_LOW.  */

static inline void
add_AT_long_long (die, attr_kind, val_hi, val_low)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned long val_hi;
     unsigned long val_low;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_long_long.low = val_low;
  a->dw_attr_val.v.val_long_long.hi = val_hi;
  a->dw_attr_val.val_class = dw_val_class_long_long;
  add_dwarf_attr (die, a);
}
/* Add a floating-point attribute of kind ATTR_KIND to DIE.  ARRAY holds
   LENGTH words of representation; the attribute takes ownership of it
   (free_AT releases it).  */

static inline void
add_AT_float (die, attr_kind, length, array)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned length;
     long *array;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_float.array = array;
  a->dw_attr_val.v.val_float.length = length;
  a->dw_attr_val.val_class = dw_val_class_float;
  add_dwarf_attr (die, a);
}
/* Add a string attribute value to a DIE.  Identical strings share one
   indirect_string_node; the node's reference count later decides whether
   the string is emitted inline or in .debug_str (see AT_string_form).  */

static inline void
add_AT_string (die, attr_kind, str)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     const char *str;
{
  dw_attr_ref attr = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));
  struct indirect_string_node *node;

  /* Create the string hash table lazily, on first use.  */
  if (! debug_str_hash)
    {
      debug_str_hash = ht_create (10);
      debug_str_hash->alloc_node = indirect_string_alloc;
    }

  node = (struct indirect_string_node *)
    ht_lookup (debug_str_hash, (const unsigned char *) str,
               strlen (str), HT_ALLOC);
  node->refcount++;

  attr->dw_attr_next = NULL;
  attr->dw_attr = attr_kind;
  attr->dw_attr_val.val_class = dw_val_class_str;
  attr->dw_attr_val.v.val_str = node;
  add_dwarf_attr (die, attr);
}
/* Return the C string stored in A, which must be of string class.  */

static inline const char *
AT_string (a)
     dw_attr_ref a;
{
  if (a == NULL || AT_class (a) != dw_val_class_str)
    abort ();

  return (const char *) HT_STR (&a->dw_attr_val.v.val_str->id);
}
/* Find out whether a string should be output inline in DIE
   or out-of-line in .debug_str section.  Returns (and caches in the
   node) either DW_FORM_string or DW_FORM_strp; for the latter, a label
   is generated for the .debug_str copy.  */

static int
AT_string_form (a)
     dw_attr_ref a;
{
  if (a && AT_class (a) == dw_val_class_str)
    {
      struct indirect_string_node *node;
      unsigned int len;
      extern int const_labelno;
      char label[32];

      node = a->dw_attr_val.v.val_str;

      /* The decision is cached on the node after the first call.  */
      if (node->form)
	return node->form;

      len = HT_LEN (&node->id) + 1;

      /* If the string is shorter or equal to the size of the reference, it is
	 always better to put it inline.  */
      if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
	return node->form = DW_FORM_string;

      /* If we cannot expect the linker to merge strings in .debug_str
	 section, only put it into .debug_str if it is worth even in this
	 single module.  */
      if ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) == 0
	  && (len - DWARF_OFFSET_SIZE) * node->refcount <= len)
	return node->form = DW_FORM_string;

      /* Out-of-line: give the string its own label in .debug_str.  */
      ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
      ++const_labelno;
      node->label = xstrdup (label);

      return node->form = DW_FORM_strp;
    }

  abort ();
}
/* Add to DIE an attribute of kind ATTR_KIND that refers to TARG_DIE.
   The reference starts out marked internal (non-external).  */

static inline void
add_AT_die_ref (die, attr_kind, targ_die)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     dw_die_ref targ_die;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_die_ref.external = 0;
  a->dw_attr_val.v.val_die_ref.die = targ_die;
  a->dw_attr_val.val_class = dw_val_class_die_ref;
  add_dwarf_attr (die, a);
}
/* Return the DIE referenced by A, which must be of die-reference class.  */

static inline dw_die_ref
AT_ref (a)
     dw_attr_ref a;
{
  if (a == NULL || AT_class (a) != dw_val_class_die_ref)
    abort ();

  return a->dw_attr_val.v.val_die_ref.die;
}
/* Return the 'external' flag of die-reference attribute A, or 0 when A
   is null or not a die reference.  */

static inline int
AT_ref_external (a)
     dw_attr_ref a;
{
  int external = 0;

  if (a && AT_class (a) == dw_val_class_die_ref)
    external = a->dw_attr_val.v.val_die_ref.external;

  return external;
}
/* Set the 'external' flag of die-reference attribute A to I.  A must be
   a die reference.  */

static inline void
set_AT_ref_external (a, i)
     dw_attr_ref a;
     int i;
{
  if (a == NULL || AT_class (a) != dw_val_class_die_ref)
    abort ();

  a->dw_attr_val.v.val_die_ref.external = i;
}
/* Add to DIE an attribute of kind ATTR_KIND holding the FDE index
   TARG_FDE.  */

static inline void
add_AT_fde_ref (die, attr_kind, targ_fde)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned targ_fde;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_fde_index = targ_fde;
  a->dw_attr_val.val_class = dw_val_class_fde_ref;
  add_dwarf_attr (die, a);
}
/* Add to DIE an attribute of kind ATTR_KIND carrying the location
   description LOC.  */

static inline void
add_AT_loc (die, attr_kind, loc)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     dw_loc_descr_ref loc;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_loc = loc;
  a->dw_attr_val.val_class = dw_val_class_loc;
  add_dwarf_attr (die, a);
}
/* Return the location description stored in A, which must be of
   location class.  */

static inline dw_loc_descr_ref
AT_loc (a)
     dw_attr_ref a;
{
  if (a == NULL || AT_class (a) != dw_val_class_loc)
    abort ();

  return a->dw_attr_val.v.val_loc;
}
/* Add to DIE an attribute of kind ATTR_KIND carrying the location list
   LOC_LIST, and note that at least one location list exists.  */

static inline void
add_AT_loc_list (die, attr_kind, loc_list)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     dw_loc_list_ref loc_list;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_loc_list = loc_list;
  a->dw_attr_val.val_class = dw_val_class_loc_list;
  add_dwarf_attr (die, a);

  /* Remember that a .debug_loc section will be needed.  */
  have_location_lists = 1;
}
/* Return the location list stored in A, which must be of
   location-list class.  */

static inline dw_loc_list_ref
AT_loc_list (a)
     dw_attr_ref a;
{
  if (a == NULL || AT_class (a) != dw_val_class_loc_list)
    abort ();

  return a->dw_attr_val.v.val_loc_list;
}
/* Add to DIE an attribute of kind ATTR_KIND holding the address
   constant ADDR (an rtx).  */

static inline void
add_AT_addr (die, attr_kind, addr)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     rtx addr;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_addr = addr;
  a->dw_attr_val.val_class = dw_val_class_addr;
  add_dwarf_attr (die, a);
}
/* Return the address rtx stored in A, which must be of address class.  */

static inline rtx
AT_addr (a)
     dw_attr_ref a;
{
  if (a == NULL || AT_class (a) != dw_val_class_addr)
    abort ();

  return a->dw_attr_val.v.val_addr;
}
/* Add to DIE an attribute of kind ATTR_KIND holding a private copy of
   the label identifier LBL_ID.  */

static inline void
add_AT_lbl_id (die, attr_kind, lbl_id)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     const char *lbl_id;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
  a->dw_attr_val.val_class = dw_val_class_lbl_id;
  add_dwarf_attr (die, a);
}
/* Add to DIE an attribute of kind ATTR_KIND holding a private copy of
   the section-offset label LABEL.  */

static inline void
add_AT_lbl_offset (die, attr_kind, label)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     const char *label;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_lbl_id = xstrdup (label);
  a->dw_attr_val.val_class = dw_val_class_lbl_offset;
  add_dwarf_attr (die, a);
}
/* Add to DIE an attribute of kind ATTR_KIND holding the numeric offset
   OFFSET.  */

static inline void
add_AT_offset (die, attr_kind, offset)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned long offset;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_offset = offset;
  a->dw_attr_val.val_class = dw_val_class_offset;
  add_dwarf_attr (die, a);
}
/* Add to DIE a range-list attribute of kind ATTR_KIND, holding the
   offset OFFSET into the ranges section.  */

static void
add_AT_range_list (die, attr_kind, offset)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     unsigned long offset;
{
  dw_attr_ref a = (dw_attr_ref) xmalloc (sizeof (dw_attr_node));

  a->dw_attr = attr_kind;
  a->dw_attr_next = NULL;
  a->dw_attr_val.v.val_offset = offset;
  a->dw_attr_val.val_class = dw_val_class_range_list;
  add_dwarf_attr (die, a);
}
static inline const char *
AT_lbl (a)
dw_attr_ref a;
{
if (a && (AT_class (a) == dw_val_class_lbl_id
|| AT_class (a) == dw_val_class_lbl_offset))
return a->dw_attr_val.v.val_lbl_id;
abort ();
}
/* Get the attribute of type attr_kind.  If DIE itself lacks it, the
   search continues in the DIE named by DW_AT_specification or
   DW_AT_abstract_origin, if either was present.  Returns NULL when the
   attribute cannot be found.  */

static inline dw_attr_ref
get_AT (die, attr_kind)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
{
  dw_attr_ref a;
  dw_die_ref spec = NULL;

  if (die != NULL)
    {
      for (a = die->die_attr; a != NULL; a = a->dw_attr_next)
	if (a->dw_attr == attr_kind)
	  return a;
	else if (a->dw_attr == DW_AT_specification
		 || a->dw_attr == DW_AT_abstract_origin)
	  spec = AT_ref (a);

      /* Not on DIE itself: recurse into the specification/origin.  */
      if (spec)
	return get_AT (spec, attr_kind);
    }

  return NULL;
}
/* Return the "low pc" attribute value, typically associated with a
   subprogram DIE, as an assembler label, or NULL when the attribute is
   absent.  */

static inline const char *
get_AT_low_pc (die)
     dw_die_ref die;
{
  dw_attr_ref a = get_AT (die, DW_AT_low_pc);

  if (a == NULL)
    return NULL;

  return AT_lbl (a);
}
/* Return the "high pc" attribute value, typically associated with a
   subprogram DIE, as an assembler label, or NULL when the attribute is
   absent.  */

static inline const char *
get_AT_hi_pc (die)
     dw_die_ref die;
{
  dw_attr_ref a = get_AT (die, DW_AT_high_pc);

  if (a == NULL)
    return NULL;

  return AT_lbl (a);
}
/* Return the value of the string attribute designated by ATTR_KIND, or
   NULL if it is not present.  */

static inline const char *
get_AT_string (die, attr_kind)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
{
  dw_attr_ref a = get_AT (die, attr_kind);

  if (a == NULL)
    return NULL;

  return AT_string (a);
}
/* Return the value of the flag attribute designated by ATTR_KIND, or 0
   if it is not present.  (An earlier comment claimed -1, but the code
   has always returned 0 for a missing attribute.)  */

static inline int
get_AT_flag (die, attr_kind)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
{
  dw_attr_ref a = get_AT (die, attr_kind);

  return a ? AT_flag (a) : 0;
}
/* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
if it is not present. */
static inline unsigned
get_AT_unsigned (die, attr_kind)
dw_die_ref die;
enum dwarf_attribute attr_kind;
{
dw_attr_ref a = get_AT (die, attr_kind);
return a ? AT_unsigned (a) : 0;
}
/* Return the DIE referenced by the attribute designated by ATTR_KIND,
   or NULL if it is not present.  */

static inline dw_die_ref
get_AT_ref (die, attr_kind)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
{
  dw_attr_ref a = get_AT (die, attr_kind);

  if (a == NULL)
    return NULL;

  return AT_ref (a);
}
/* Nonzero if the compilation unit's source language is C or C++.  */

static inline int
is_c_family ()
{
  unsigned int language = get_AT_unsigned (comp_unit_die, DW_AT_language);

  switch (language)
    {
    case DW_LANG_C:
    case DW_LANG_C89:
    case DW_LANG_C_plus_plus:
      return 1;
    default:
      return 0;
    }
}
/* Nonzero if the compilation unit's source language is C++.  */

static inline int
is_cxx ()
{
  unsigned int language = get_AT_unsigned (comp_unit_die, DW_AT_language);

  return language == DW_LANG_C_plus_plus;
}
/* Nonzero if the compilation unit's source language is Fortran.  */

static inline int
is_fortran ()
{
  unsigned int language = get_AT_unsigned (comp_unit_die, DW_AT_language);

  switch (language)
    {
    case DW_LANG_Fortran77:
    case DW_LANG_Fortran90:
      return 1;
    default:
      return 0;
    }
}
/* Nonzero if the compilation unit's source language is Java.  */

static inline int
is_java ()
{
  unsigned int language = get_AT_unsigned (comp_unit_die, DW_AT_language);

  return language == DW_LANG_Java;
}
/* Free up the memory used by A.  Storage that the attribute owns
   (label strings, float arrays) is released; a shared string node only
   has its reference count dropped, since other attributes may still
   point at it.  */
static inline void free_AT PARAMS ((dw_attr_ref));
static inline void
free_AT (a)
     dw_attr_ref a;
{
  switch (AT_class (a))
    {
    case dw_val_class_str:
      /* Strings are shared; just release this attribute's reference.  */
      if (a->dw_attr_val.v.val_str->refcount)
	a->dw_attr_val.v.val_str->refcount--;
      break;

    case dw_val_class_lbl_id:
    case dw_val_class_lbl_offset:
      /* These were xstrdup'ed at creation, so the attribute owns them.  */
      free (a->dw_attr_val.v.val_lbl_id);
      break;

    case dw_val_class_float:
      free (a->dw_attr_val.v.val_float.array);
      break;

    default:
      break;
    }

  free (a);
}
/* Remove the attribute of kind ATTR_KIND from DIE, if present, and free
   it.  Only the first matching attribute is removed.  */

static void
remove_AT (die, attr_kind)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
{
  dw_attr_ref *p;
  dw_attr_ref removed = NULL;

  if (die != NULL)
    {
      /* Walk with a pointer-to-pointer so unlinking needs no special
	 case for the list head.  */
      for (p = &(die->die_attr); *p; p = &((*p)->dw_attr_next))
	if ((*p)->dw_attr == attr_kind)
	  {
	    removed = *p;
	    *p = (*p)->dw_attr_next;
	    break;
	  }

      if (removed != 0)
	free_AT (removed);
    }
}
/* Free up the memory used by DIE and its children.  Note that DIE's own
   attribute list is not freed here; only the descendants' attributes are
   released (by remove_children via free_AT).  */

static inline void
free_die (die)
     dw_die_ref die;
{
  remove_children (die);
  free (die);
}
/* Discard the children of this DIE, freeing each child's attributes and
   then the child itself (recursively, via free_die).  */

static void
remove_children (die)
     dw_die_ref die;
{
  dw_die_ref child_die = die->die_child;

  /* Detach the list first so the DIE is consistent while we free.  */
  die->die_child = NULL;

  while (child_die != NULL)
    {
      dw_die_ref tmp_die = child_die;
      dw_attr_ref a;

      child_die = child_die->die_sib;

      for (a = tmp_die->die_attr; a != NULL;)
	{
	  dw_attr_ref tmp_a = a;

	  a = a->dw_attr_next;
	  free_AT (tmp_a);
	}

      free_die (tmp_die);
    }
}
/* Link CHILD_DIE below DIE.  Children are pushed onto the front of the
   sibling list, i.e. in reverse addition order; reverse_all_dies fixes
   the order later.  Null arguments are ignored; making a DIE its own
   child is a hard error.  */

static inline void
add_child_die (die, child_die)
     dw_die_ref die;
     dw_die_ref child_die;
{
  if (die == NULL || child_die == NULL)
    return;
  if (die == child_die)
    abort ();

  child_die->die_parent = die;
  child_die->die_sib = die->die_child;
  die->die_child = child_die;
}
/* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
   is the specification, to the front of PARENT's list of children.  */

static void
splice_child_die (parent, child)
     dw_die_ref parent, child;
{
  dw_die_ref *p;

  /* We want the declaration DIE from inside the class, not the
     specification DIE at toplevel.  */
  if (child->die_parent != parent)
    {
      dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);

      if (tmp)
	child = tmp;
    }

  /* Sanity-check the relationship before unlinking anything.  */
  if (child->die_parent != parent
      && child->die_parent != get_AT_ref (parent, DW_AT_specification))
    abort ();

  /* Unlink CHILD from its current parent's sibling list ...  */
  for (p = &(child->die_parent->die_child); *p; p = &((*p)->die_sib))
    if (*p == child)
      {
	*p = child->die_sib;
	break;
      }

  /* ... and push it onto the front of PARENT's list.  */
  child->die_sib = parent->die_child;
  parent->die_child = child;
}
/* Return a pointer to a newly created DIE node with tag TAG_VALUE.  If
   PARENT_DIE is given, the new DIE becomes its child; otherwise it is
   queued on the limbo list, recording T as the tree node it was created
   for, until a parent is determined.  */

static inline dw_die_ref
new_die (tag_value, parent_die, t)
     enum dwarf_tag tag_value;
     dw_die_ref parent_die;
     tree t;
{
  dw_die_ref die = (dw_die_ref) xcalloc (1, sizeof (die_node));

  die->die_tag = tag_value;

  if (parent_die != NULL)
    add_child_die (parent_die, die);
  else
    {
      limbo_die_node *limbo_node;

      /* Orphan DIE: remember it on the limbo list for later adoption.  */
      limbo_node = (limbo_die_node *) xmalloc (sizeof (limbo_die_node));
      limbo_node->die = die;
      limbo_node->created_for = t;
      limbo_node->next = limbo_die_list;
      limbo_die_list = limbo_node;
    }

  return die;
}
/* Return the DIE associated with the given type specifier, or NULL if
   none has been recorded yet.  */

static inline dw_die_ref
lookup_type_die (type)
     tree type;
{
  return TYPE_SYMTAB_DIE (type);
}
/* Equate a DIE to a given type specifier, so lookup_type_die will find
   TYPE_DIE for TYPE from now on.  */

static inline void
equate_type_number_to_die (type, type_die)
     tree type;
     dw_die_ref type_die;
{
  TYPE_SYMTAB_DIE (type) = type_die;
}
/* Return the DIE associated with a given declaration, or NULL if the
   declaration's UID is outside the used portion of the table.  */

static inline dw_die_ref
lookup_decl_die (decl)
     tree decl;
{
  unsigned decl_id = DECL_UID (decl);

  if (decl_id >= decl_die_table_in_use)
    return NULL;

  return decl_die_table[decl_id];
}
/* Equate a DIE to a particular declaration, growing the UID-indexed
   decl_die_table as needed.  */

static void
equate_decl_number_to_die (decl, decl_die)
     tree decl;
     dw_die_ref decl_die;
{
  unsigned int decl_id = DECL_UID (decl);
  unsigned int num_allocated;

  if (decl_id >= decl_die_table_allocated)
    {
      /* Round the new size up to a multiple of the table increment.  */
      num_allocated
	= ((decl_id + 1 + DECL_DIE_TABLE_INCREMENT - 1)
	   / DECL_DIE_TABLE_INCREMENT)
	  * DECL_DIE_TABLE_INCREMENT;

      decl_die_table
	= (dw_die_ref *) xrealloc (decl_die_table,
				   sizeof (dw_die_ref) * num_allocated);

      /* Zero-fill the newly allocated tail of the table.  */
      memset ((char *) &decl_die_table[decl_die_table_allocated], 0,
	     (num_allocated - decl_die_table_allocated) * sizeof (dw_die_ref));
      decl_die_table_allocated = num_allocated;
    }

  if (decl_id >= decl_die_table_in_use)
    decl_die_table_in_use = (decl_id + 1);

  decl_die_table[decl_id] = decl_die;
}
/* Keep track of the number of spaces used to indent the
   output of the debugging routines that print the structure of
   the DIE internal representation.  */
static int print_indent;

/* Indent the line the number of spaces given by print_indent.  */

static inline void
print_spaces (outfile)
     FILE *outfile;
{
  fprintf (outfile, "%*s", print_indent, "");
}
/* Print the information associated with a given DIE, and its children.
   Output is indented by print_indent spaces, growing by four per
   nesting level.  This routine is a debugging aid only.  */

static void
print_die (die, outfile)
     dw_die_ref die;
     FILE *outfile;
{
  dw_attr_ref a;
  dw_die_ref c;

  print_spaces (outfile);
  fprintf (outfile, "DIE %4lu: %s\n",
	   die->die_offset, dwarf_tag_name (die->die_tag));
  print_spaces (outfile);
  fprintf (outfile, "  abbrev id: %lu", die->die_abbrev);
  fprintf (outfile, " offset: %lu\n", die->die_offset);

  /* One line per attribute, rendered according to its value class.  */
  for (a = die->die_attr; a != NULL; a = a->dw_attr_next)
    {
      print_spaces (outfile);
      fprintf (outfile, "  %s: ", dwarf_attr_name (a->dw_attr));

      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  fprintf (outfile, "address");
	  break;
	case dw_val_class_offset:
	  fprintf (outfile, "offset");
	  break;
	case dw_val_class_loc:
	  fprintf (outfile, "location descriptor");
	  break;
	case dw_val_class_loc_list:
	  fprintf (outfile, "location list -> label:%s",
		   AT_loc_list (a)->ll_symbol);
	  break;
	case dw_val_class_range_list:
	  fprintf (outfile, "range list");
	  break;
	case dw_val_class_const:
	  fprintf (outfile, "%ld", AT_int (a));
	  break;
	case dw_val_class_unsigned_const:
	  fprintf (outfile, "%lu", AT_unsigned (a));
	  break;
	case dw_val_class_long_long:
	  fprintf (outfile, "constant (%lu,%lu)",
		   a->dw_attr_val.v.val_long_long.hi,
		   a->dw_attr_val.v.val_long_long.low);
	  break;
	case dw_val_class_float:
	  fprintf (outfile, "floating-point constant");
	  break;
	case dw_val_class_flag:
	  fprintf (outfile, "%u", AT_flag (a));
	  break;
	case dw_val_class_die_ref:
	  if (AT_ref (a) != NULL)
	    {
	      /* Prefer the symbolic name when one was assigned.  */
	      if (AT_ref (a)->die_symbol)
		fprintf (outfile, "die -> label: %s", AT_ref (a)->die_symbol);
	      else
		fprintf (outfile, "die -> %lu", AT_ref (a)->die_offset);
	    }
	  else
	    fprintf (outfile, "die -> <null>");
	  break;
	case dw_val_class_lbl_id:
	case dw_val_class_lbl_offset:
	  fprintf (outfile, "label: %s", AT_lbl (a));
	  break;
	case dw_val_class_str:
	  if (AT_string (a) != NULL)
	    fprintf (outfile, "\"%s\"", AT_string (a));
	  else
	    fprintf (outfile, "<null>");
	  break;
	default:
	  break;
	}

      fprintf (outfile, "\n");
    }

  /* Recurse into children with deeper indentation.  */
  if (die->die_child != NULL)
    {
      print_indent += 4;
      for (c = die->die_child; c != NULL; c = c->die_sib)
	print_die (c, outfile);

      print_indent -= 4;
    }
  if (print_indent == 0)
    fprintf (outfile, "\n");
}
/* Print the contents of the source code line number correspondence table.
   One line per entry: index, source file name, line number.  This
   routine is a debugging aid only.  */

static void
print_dwarf_line_table (outfile)
     FILE *outfile;
{
  unsigned i;
  dw_line_info_ref line_info;

  fprintf (outfile, "\n\nDWARF source line information\n");
  for (i = 1; i < line_info_table_in_use; i++)
    {
      line_info = &line_info_table[i];
      /* I is unsigned, so use the unsigned conversion specifier;
	 "%d" would be undefined for values above INT_MAX.  */
      fprintf (outfile, "%5u: ", i);
      fprintf (outfile, "%-20s", file_table.table[line_info->dw_file_num]);
      fprintf (outfile, "%6ld", line_info->dw_line_num);
      fprintf (outfile, "\n");
    }

  fprintf (outfile, "\n\n");
}
/* Print the information collected for a given DIE to stderr.
   Intended to be called from a debugger.  */

void
debug_dwarf_die (die)
     dw_die_ref die;
{
  print_die (die, stderr);
}
/* Print all DWARF information collected for the compilation unit to
   stderr, including the line table unless the assembler emits the line
   debug info itself.  This routine is a debugging aid only.  */

void
debug_dwarf ()
{
  print_indent = 0;
  print_die (comp_unit_die, stderr);
  if (! DWARF2_ASM_LINE_DEBUG_INFO)
    print_dwarf_line_table (stderr);
}
/* We build up the lists of children and attributes by pushing new ones
   onto the beginning of the list.  Reverse the lists for DIE so that
   they are in order of addition.  Only DIE's own lists are reversed;
   see reverse_all_dies for the recursive version.  */

static void
reverse_die_lists (die)
     dw_die_ref die;
{
  dw_die_ref c, cp, cn;
  dw_attr_ref a, ap, an;

  /* In-place reversal of the attribute list: AP trails as the new head.  */
  for (a = die->die_attr, ap = 0; a; a = an)
    {
      an = a->dw_attr_next;
      a->dw_attr_next = ap;
      ap = a;
    }

  die->die_attr = ap;

  /* Same technique for the child list.  */
  for (c = die->die_child, cp = 0; c; c = cn)
    {
      cn = c->die_sib;
      c->die_sib = cp;
      cp = c;
    }

  die->die_child = cp;
}
/* reverse_die_lists only reverses the single die you pass it. Since we used to
   reverse all dies in add_sibling_attributes, which runs through all the dies,
   it would reverse all the dies.  Now, however, since we don't call
   reverse_die_lists in add_sibling_attributes, we need a routine to
   recursively reverse all the dies. This is that routine.  */

static void
reverse_all_dies (die)
     dw_die_ref die;
{
  dw_die_ref c;

  /* Reverse this DIE first, so the child walk below sees the children
     already in addition order.  */
  reverse_die_lists (die);
  for (c = die->die_child; c; c = c->die_sib)
    reverse_all_dies (c);
}
/* Start a new compilation unit DIE for an include file.  OLD_UNIT is the CU
   for the enclosing include file, if any.  BINCL_DIE is the DW_TAG_GNU_BINCL
   DIE that marks the start of the DIEs for this include file.  The new
   unit's die_sib field is borrowed as the stack link back to OLD_UNIT
   (undone by pop_compile_unit).  */

static dw_die_ref
push_new_compile_unit (old_unit, bincl_die)
     dw_die_ref old_unit, bincl_die;
{
  const char *filename = get_AT_string (bincl_die, DW_AT_name);
  dw_die_ref new_unit = gen_compile_unit_die (filename);

  new_unit->die_sib = old_unit;
  return new_unit;
}
/* Close an include-file CU and reopen the enclosing one.  The enclosing
   unit was stashed in OLD_UNIT's die_sib by push_new_compile_unit; the
   field is cleared here since it was only borrowed as a stack link.  */

static dw_die_ref
pop_compile_unit (old_unit)
     dw_die_ref old_unit;
{
  dw_die_ref new_unit = old_unit->die_sib;

  old_unit->die_sib = NULL;
  return new_unit;
}
/* Helpers to fold raw bytes or a string into the MD5 context CTX.  */
#define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
#define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)

/* Calculate the checksum of a location expression by folding its opcode
   and both operands into CTX.  */

static inline void
loc_checksum (loc, ctx)
     dw_loc_descr_ref loc;
     struct md5_ctx *ctx;
{
  CHECKSUM (loc->dw_loc_opc);
  CHECKSUM (loc->dw_loc_oprnd1);
  CHECKSUM (loc->dw_loc_oprnd2);
}
/* Calculate the checksum of an attribute, folding its kind and value
   into CTX.  MARK is the running DIE-numbering counter used by
   die_checksum to break reference cycles.  */

static void
attr_checksum (at, ctx, mark)
     dw_attr_ref at;
     struct md5_ctx *ctx;
     int *mark;
{
  dw_loc_descr_ref loc;
  rtx r;

  CHECKSUM (at->dw_attr);

  /* We don't care about differences in file numbering.  */
  if (at->dw_attr == DW_AT_decl_file
      /* Or that this was compiled with a different compiler snapshot; if
	 the output is the same, that's what matters.  */
      || at->dw_attr == DW_AT_producer)
    return;

  switch (AT_class (at))
    {
    case dw_val_class_const:
      CHECKSUM (at->dw_attr_val.v.val_int);
      break;
    case dw_val_class_unsigned_const:
      CHECKSUM (at->dw_attr_val.v.val_unsigned);
      break;
    case dw_val_class_long_long:
      CHECKSUM (at->dw_attr_val.v.val_long_long);
      break;
    case dw_val_class_float:
      CHECKSUM (at->dw_attr_val.v.val_float);
      break;
    case dw_val_class_flag:
      CHECKSUM (at->dw_attr_val.v.val_flag);
      break;
    case dw_val_class_str:
      CHECKSUM_STRING (AT_string (at));
      break;

    case dw_val_class_addr:
      r = AT_addr (at);
      switch (GET_CODE (r))
	{
	case SYMBOL_REF:
	  CHECKSUM_STRING (XSTR (r, 0));
	  break;

	default:
	  /* Only symbolic addresses are expected here.  */
	  abort ();
	}
      break;

    case dw_val_class_offset:
      CHECKSUM (at->dw_attr_val.v.val_offset);
      break;

    case dw_val_class_loc:
      for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
	loc_checksum (loc, ctx);
      break;

    case dw_val_class_die_ref:
      /* Recurse into the referenced DIE.  */
      die_checksum (AT_ref (at), ctx, mark);
      break;

    case dw_val_class_fde_ref:
    case dw_val_class_lbl_id:
    case dw_val_class_lbl_offset:
      /* These are output-position dependent; leave them out of the sum.  */
      break;

    default:
      break;
    }
}
/* Calculate the checksum of a DIE: its tag, all attributes, and all
   children.  Each visited DIE is numbered via MARK so that a DIE seen
   again (through a reference cycle) contributes only its number.  */

static void
die_checksum (die, ctx, mark)
     dw_die_ref die;
     struct md5_ctx *ctx;
     int *mark;
{
  dw_die_ref c;
  dw_attr_ref a;

  /* To avoid infinite recursion.  */
  if (die->die_mark)
    {
      CHECKSUM (die->die_mark);
      return;
    }
  die->die_mark = ++(*mark);

  CHECKSUM (die->die_tag);

  for (a = die->die_attr; a; a = a->dw_attr_next)
    attr_checksum (a, ctx, mark);

  for (c = die->die_child; c; c = c->die_sib)
    die_checksum (c, ctx, mark);
}

#undef CHECKSUM
#undef CHECKSUM_STRING
/* Do the location expressions look the same?  Compares opcode and both
   operands; MARK is threaded through for DIE-reference comparison.  */

static inline int
same_loc_p (loc1, loc2, mark)
     dw_loc_descr_ref loc1;
     dw_loc_descr_ref loc2;
     int *mark;
{
  return loc1->dw_loc_opc == loc2->dw_loc_opc
	 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
	 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
}
/* Do the values look the same?  Values of different classes never
   compare equal; within a class the comparison is value-by-value, with
   DIE references compared structurally via same_die_p.  */

static int
same_dw_val_p (v1, v2, mark)
     dw_val_node *v1;
     dw_val_node *v2;
     int *mark;
{
  dw_loc_descr_ref loc1, loc2;
  rtx r1, r2;
  unsigned i;

  if (v1->val_class != v2->val_class)
    return 0;

  switch (v1->val_class)
    {
    case dw_val_class_const:
      return v1->v.val_int == v2->v.val_int;
    case dw_val_class_unsigned_const:
      return v1->v.val_unsigned == v2->v.val_unsigned;
    case dw_val_class_long_long:
      return v1->v.val_long_long.hi == v2->v.val_long_long.hi
	     && v1->v.val_long_long.low == v2->v.val_long_long.low;

    case dw_val_class_float:
      if (v1->v.val_float.length != v2->v.val_float.length)
	return 0;

      for (i = 0; i < v1->v.val_float.length; i++)
	if (v1->v.val_float.array[i] != v2->v.val_float.array[i])
	  return 0;
      return 1;

    case dw_val_class_flag:
      return v1->v.val_flag == v2->v.val_flag;
    case dw_val_class_str:
      return !strcmp((const char *) HT_STR (&v1->v.val_str->id),
		     (const char *) HT_STR (&v2->v.val_str->id));

    case dw_val_class_addr:
      r1 = v1->v.val_addr;
      r2 = v2->v.val_addr;
      if (GET_CODE (r1) != GET_CODE (r2))
	return 0;

      switch (GET_CODE (r1))
	{
	case SYMBOL_REF:
	  return !strcmp (XSTR (r1, 0), XSTR (r2, 0));

	default:
	  /* Only symbolic addresses are expected here.  */
	  abort ();
	}

    case dw_val_class_offset:
      return v1->v.val_offset == v2->v.val_offset;

    case dw_val_class_loc:
      /* Location expression lists: equal iff the same length with
	 pairwise-equal entries.  */
      for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
	   loc1 && loc2;
	   loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
	if (!same_loc_p (loc1, loc2, mark))
	  return 0;
      return !loc1 && !loc2;

    case dw_val_class_die_ref:
      return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);

    case dw_val_class_fde_ref:
    case dw_val_class_lbl_id:
    case dw_val_class_lbl_offset:
      /* Output-position dependent; treated as always equal.  */
      return 1;

    default:
      return 1;
    }
}
/* Do the attributes look the same?  The attribute kinds must match;
   DW_AT_decl_file and DW_AT_producer are then accepted regardless of
   value, mirroring the exclusions in attr_checksum.  */

static int
same_attr_p (at1, at2, mark)
     dw_attr_ref at1;
     dw_attr_ref at2;
     int *mark;
{
  if (at1->dw_attr != at2->dw_attr)
    return 0;

  /* We don't care about differences in file numbering.  */
  if (at1->dw_attr == DW_AT_decl_file
      /* Or that this was compiled with a different compiler snapshot; if
	 the output is the same, that's what matters.  */
      || at1->dw_attr == DW_AT_producer)
    return 1;

  return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
}
/* Do the dies look the same?  Compares tags, then attribute lists, then
   child lists, requiring equal lengths and pairwise equality.  MARK
   numbers visited DIEs so that reference cycles terminate; use
   same_die_p_wrap, which clears the marks afterwards, as the entry
   point.  */

static int
same_die_p (die1, die2, mark)
     dw_die_ref die1;
     dw_die_ref die2;
     int *mark;
{
  dw_die_ref c1, c2;
  dw_attr_ref a1, a2;

  /* To avoid infinite recursion.  */
  if (die1->die_mark)
    return die1->die_mark == die2->die_mark;
  die1->die_mark = die2->die_mark = ++(*mark);

  if (die1->die_tag != die2->die_tag)
    return 0;

  for (a1 = die1->die_attr, a2 = die2->die_attr;
       a1 && a2;
       a1 = a1->dw_attr_next, a2 = a2->dw_attr_next)
    if (!same_attr_p (a1, a2, mark))
      return 0;

  /* Differing attribute counts: one list ended first.  */
  if (a1 || a2)
    return 0;

  for (c1 = die1->die_child, c2 = die2->die_child;
       c1 && c2;
       c1 = c1->die_sib, c2 = c2->die_sib)
    if (!same_die_p (c1, c2, mark))
      return 0;

  /* Differing child counts.  */
  if (c1 || c2)
    return 0;

  return 1;
}
/* Do the dies look the same?  Wrapper around same_die_p that supplies a
   fresh mark counter and clears the marks it leaves behind, so repeated
   comparisons start from a clean state.  */

static int
same_die_p_wrap (die1, die2)
     dw_die_ref die1;
     dw_die_ref die2;
{
  int mark = 0;
  int ret = same_die_p (die1, die2, &mark);

  unmark_all_dies (die1);
  unmark_all_dies (die2);

  return ret;
}
/* The prefix to attach to symbols on DIEs in the current comdat debug
   info section.  */
static char *comdat_symbol_id;

/* The index of the current symbol within the current comdat CU.  */
static unsigned int comdat_symbol_number;

/* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
   children, and set comdat_symbol_id accordingly.  The id is the unit's
   base filename (or "anonymous"), cleaned for symbol use, followed by
   the first four checksum bytes in hex.  */

static void
compute_section_prefix (unit_die)
     dw_die_ref unit_die;
{
  const char *die_name = get_AT_string (unit_die, DW_AT_name);
  const char *base = die_name ? lbasename (die_name) : "anonymous";
  char *name = (char *) alloca (strlen (base) + 64);
  char *p;
  int i, mark;
  unsigned char checksum[16];
  struct md5_ctx ctx;

  /* Compute the checksum of the DIE, then append part of it as hex digits to
     the name filename of the unit.  */
  md5_init_ctx (&ctx);
  mark = 0;
  die_checksum (unit_die, &ctx, &mark);
  unmark_all_dies (unit_die);
  md5_finish_ctx (&ctx, checksum);

  sprintf (name, "%s.", base);
  clean_symbol_name (name);

  p = name + strlen (name);
  for (i = 0; i < 4; i++)
    {
      sprintf (p, "%.2x", checksum[i]);
      p += 2;
    }

  comdat_symbol_id = unit_die->die_symbol = xstrdup (name);
  /* Restart per-CU symbol numbering for the new prefix.  */
  comdat_symbol_number = 0;
}
/* Returns nonzero if DIE represents a type, in the sense of TYPE_P.  */

static int
is_type_die (die)
     dw_die_ref die;
{
  switch (die->die_tag)
    {
    case DW_TAG_array_type:
    case DW_TAG_class_type:
    case DW_TAG_enumeration_type:
    case DW_TAG_pointer_type:
    case DW_TAG_reference_type:
    case DW_TAG_string_type:
    case DW_TAG_structure_type:
    case DW_TAG_subroutine_type:
    case DW_TAG_union_type:
    case DW_TAG_ptr_to_member_type:
    case DW_TAG_set_type:
    case DW_TAG_subrange_type:
    case DW_TAG_base_type:
    case DW_TAG_const_type:
    case DW_TAG_file_type:
    case DW_TAG_packed_type:
    case DW_TAG_volatile_type:
    case DW_TAG_typedef:
      return 1;
    default:
      return 0;
    }
}
/* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
   Basically, we want to choose the bits that are likely to be shared between
   compilations (types) and leave out the bits that are specific to individual
   compilations (functions).  */

static int
is_comdat_die (c)
     dw_die_ref c;
{
  /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
     we do for stabs.  The advantage is a greater likelihood of sharing between
     objects that don't include headers in the same order (and therefore would
     put the base types in a different comdat).  jason 8/28/00 */

  if (c->die_tag == DW_TAG_base_type)
    return 0;

  /* For wrapper types, the decision follows the wrapped type.  */
  if (c->die_tag == DW_TAG_pointer_type
      || c->die_tag == DW_TAG_reference_type
      || c->die_tag == DW_TAG_const_type
      || c->die_tag == DW_TAG_volatile_type)
    {
      dw_die_ref t = get_AT_ref (c, DW_AT_type);

      return t ? is_comdat_die (t) : 0;
    }

  return is_type_die (c);
}
/* Returns 1 iff C is the sort of DIE that might be referred to from another
   compilation unit: a type, or a declaration with no separate
   specification DIE.  */

static int
is_symbol_die (c)
     dw_die_ref c;
{
  return (is_type_die (c)
	  || (get_AT (c, DW_AT_declaration)
	      && !get_AT (c, DW_AT_specification)));
}
/* Return a freshly allocated, unique internal label string built from
   PREFIX.  */

static char *
gen_internal_sym (prefix)
     const char *prefix;
{
  char buf[256];
  /* Monotonic counter shared by all prefixes.  */
  static int label_num;

  ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
  return xstrdup (buf);
}
/* Assign symbols to all worthy DIEs under DIE.  Inside a comdat CU the
   symbol is derived from comdat_symbol_id plus a running index;
   otherwise a plain internal "LDIE" label is used.  */

static void
assign_symbol_names (die)
     dw_die_ref die;
{
  dw_die_ref c;

  if (is_symbol_die (die))
    {
      if (comdat_symbol_id)
	{
	  char *p = alloca (strlen (comdat_symbol_id) + 64);

	  sprintf (p, "%s.%s.%x", DIE_LABEL_PREFIX,
		   comdat_symbol_id, comdat_symbol_number++);
	  die->die_symbol = xstrdup (p);
	}
      else
	die->die_symbol = gen_internal_sym ("LDIE");
    }

  for (c = die->die_child; c != NULL; c = c->die_sib)
    assign_symbol_names (c);
}
/* One record per distinct comdat CU contents; entries with the same
   symbol name are chained through NEXT.  */
struct cu_hash_table_entry
{
  /* The compilation unit DIE this entry describes.  */
  dw_die_ref cu;
  /* Range of comdat symbol numbers used by this CU.  */
  unsigned min_comdat_num, max_comdat_num;
  /* Next entry sharing the same hash slot.  */
  struct cu_hash_table_entry *next;
};
/* Routines to manipulate hash table of CUs.  */

/* Hash function: hash the CU's die_symbol string.  */

static hashval_t
htab_cu_hash (of)
     const void *of;
{
  const struct cu_hash_table_entry *entry = of;

  return htab_hash_string (entry->cu->die_symbol);
}
/* Equality function.  Note the asymmetry: OF1 is a stored table entry,
   while OF2 is the raw DIE used as the lookup key (check_duplicate_cu
   and record_comdat_symbol_number pass the CU DIE itself).  */

static int
htab_cu_eq (of1, of2)
     const void *of1;
     const void *of2;
{
  const struct cu_hash_table_entry *entry1 = of1;
  const struct die_struct *entry2 = of2;

  return !strcmp (entry1->cu->die_symbol, entry2->die_symbol);
}
/* Deletion function: free the whole chain of entries hanging off one
   hash slot.  */

static void
htab_cu_del (what)
     void *what;
{
  struct cu_hash_table_entry *next, *entry = what;

  while (entry)
    {
      next = entry->next;
      free (entry);
      entry = next;
    }
}
/* Check whether we have already seen this CU and set up SYM_NUM
   accordingly.  Returns 1 (and sets *SYM_NUM to the earlier CU's first
   comdat number) when a structurally identical CU is already recorded;
   otherwise records CU in HTABLE, sets *SYM_NUM to the next free
   number, and returns 0.  */

static int
check_duplicate_cu (cu, htable, sym_num)
     dw_die_ref cu;
     htab_t htable;
     unsigned *sym_num;
{
  struct cu_hash_table_entry dummy;
  struct cu_hash_table_entry **slot, *entry, *last = &dummy;

  dummy.max_comdat_num = 0;

  slot = (struct cu_hash_table_entry **)
    htab_find_slot_with_hash (htable, cu, htab_hash_string (cu->die_symbol),
	INSERT);
  entry = *slot;

  /* Same symbol name is necessary but not sufficient; compare the DIE
     trees of every chained candidate.  */
  for (; entry; last = entry, entry = entry->next)
    {
      if (same_die_p_wrap (cu, entry->cu))
	break;
    }

  if (entry)
    {
      *sym_num = entry->min_comdat_num;
      return 1;
    }

  /* New contents: chain a fresh entry onto the slot.  LAST is the tail
     entry (or the dummy when the chain was empty), whose max gives the
     next free comdat number.  */
  entry = xcalloc (1, sizeof (struct cu_hash_table_entry));
  entry->cu = cu;
  entry->min_comdat_num = *sym_num = last->max_comdat_num;
  entry->next = *slot;
  *slot = entry;

  return 0;
}
/* Record SYM_NUM as the upper bound of comdat symbol numbers used by CU
   in its HTABLE entry; CU must already have been entered by
   check_duplicate_cu.  */

static void
record_comdat_symbol_number (cu, htable, sym_num)
     dw_die_ref cu;
     htab_t htable;
     unsigned sym_num;
{
  struct cu_hash_table_entry **slot, *entry;

  slot = (struct cu_hash_table_entry **)
    htab_find_slot_with_hash (htable, cu, htab_hash_string (cu->die_symbol),
	NO_INSERT);
  entry = *slot;

  entry->max_comdat_num = sym_num;
}
/* Traverse the DIE (which is always comp_unit_die), and set up
   additional compilation units for each of the include files we see
   bracketed by BINCL/EINCL.  Children between the markers are moved
   into the include file's CU; duplicate CUs (identical contents) are
   then dropped from the limbo list.  */

static void
break_out_includes (die)
     dw_die_ref die;
{
  dw_die_ref *ptr;
  dw_die_ref unit = NULL;
  limbo_die_node *node, **pnode;
  htab_t cu_hash_table;

  for (ptr = &(die->die_child); *ptr;)
    {
      dw_die_ref c = *ptr;

      if (c->die_tag == DW_TAG_GNU_BINCL || c->die_tag == DW_TAG_GNU_EINCL
	  || (unit && is_comdat_die (c)))
	{
	  /* This DIE is for a secondary CU; remove it from the main one.  */
	  *ptr = c->die_sib;

	  if (c->die_tag == DW_TAG_GNU_BINCL)
	    {
	      /* Open a nested include-file CU; the marker itself is
		 consumed.  */
	      unit = push_new_compile_unit (unit, c);
	      free_die (c);
	    }
	  else if (c->die_tag == DW_TAG_GNU_EINCL)
	    {
	      unit = pop_compile_unit (unit);
	      free_die (c);
	    }
	  else
	    add_child_die (unit, c);
	}
      else
	{
	  /* Leave this DIE in the main CU.  */
	  ptr = &(c->die_sib);
	  continue;
	}
    }

  assign_symbol_names (die);
  cu_hash_table = htab_create (10, htab_cu_hash, htab_cu_eq, htab_cu_del);
  for (node = limbo_die_list, pnode = &limbo_die_list;
       node;
       node = node->next)
    {
      int is_dupl;

      compute_section_prefix (node->die);
      is_dupl = check_duplicate_cu (node->die, cu_hash_table,
	&comdat_symbol_number);
      assign_symbol_names (node->die);
      if (is_dupl)
	/* Identical CU already queued; unlink this one.  */
	*pnode = node->next;
      else
	{
	  pnode = &node->next;
	  record_comdat_symbol_number (node->die, cu_hash_table,
		comdat_symbol_number);
	}
    }
  htab_delete (cu_hash_table);
}
/* Walk DIE's tree and attach DW_AT_sibling attributes where they may
   speed up access to siblings.  To save space, DIEs without children
   and the compile unit itself get no sibling attribute.  */
static void
add_sibling_attributes (die)
     dw_die_ref die;
{
  dw_die_ref child;

  if (die->die_tag != DW_TAG_compile_unit
      && die->die_sib && die->die_child != NULL)
    /* Add the sibling link to the front of the attribute list.  */
    add_AT_die_ref (die, DW_AT_sibling, die->die_sib);

  for (child = die->die_child; child != NULL; child = child->die_sib)
    add_sibling_attributes (child);
}
/* Emit every location list referenced from DIE's attributes, then
   recurse over all of its children.  */
static void
output_location_lists (die)
     dw_die_ref die;
{
  dw_attr_ref attr;
  dw_die_ref child;

  for (attr = die->die_attr; attr; attr = attr->dw_attr_next)
    if (AT_class (attr) == dw_val_class_loc_list)
      output_loc_list (AT_loc_list (attr));

  for (child = die->die_child; child != NULL; child = child->die_sib)
    output_location_lists (child);
}
/* The format of each DIE (and its attribute value pairs) is encoded in an
   abbreviation table.  This routine builds the abbreviation table and assigns
   a unique abbreviation id for each abbreviation entry.  The children of each
   die are visited recursively.  */
static void
build_abbrev_table (die)
     dw_die_ref die;
{
  unsigned long abbrev_id;
  unsigned int n_alloc;
  dw_die_ref c;
  dw_attr_ref d_attr, a_attr;

  /* Scan the DIE references, and mark as external any that refer to
     DIEs from other CUs (i.e. those which are not marked).  */
  for (d_attr = die->die_attr; d_attr; d_attr = d_attr->dw_attr_next)
    if (AT_class (d_attr) == dw_val_class_die_ref
	&& AT_ref (d_attr)->die_mark == 0)
      {
	/* A cross-CU reference needs a symbol to refer to.  */
	if (AT_ref (d_attr)->die_symbol == 0)
	  abort ();
	set_AT_ref_external (d_attr, 1);
      }

  /* Look for an existing abbreviation with the same tag, the same
     has-children flag, and an attribute list that matches in both
     attribute codes and forms.  */
  for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
    {
      dw_die_ref abbrev = abbrev_die_table[abbrev_id];

      if (abbrev->die_tag == die->die_tag)
	{
	  if ((abbrev->die_child != NULL) == (die->die_child != NULL))
	    {
	      a_attr = abbrev->die_attr;
	      d_attr = die->die_attr;

	      while (a_attr != NULL && d_attr != NULL)
		{
		  if ((a_attr->dw_attr != d_attr->dw_attr)
		      || (value_format (a_attr) != value_format (d_attr)))
		    break;

		  a_attr = a_attr->dw_attr_next;
		  d_attr = d_attr->dw_attr_next;
		}

	      /* Both lists exhausted together: complete match.  */
	      if (a_attr == NULL && d_attr == NULL)
		break;
	    }
	}
    }

  /* No match: register DIE itself as a new abbreviation, growing the
     table when full.  */
  if (abbrev_id >= abbrev_die_table_in_use)
    {
      if (abbrev_die_table_in_use >= abbrev_die_table_allocated)
	{
	  n_alloc = abbrev_die_table_allocated + ABBREV_DIE_TABLE_INCREMENT;
	  abbrev_die_table
	    = (dw_die_ref *) xrealloc (abbrev_die_table,
				       sizeof (dw_die_ref) * n_alloc);

	  memset ((char *) &abbrev_die_table[abbrev_die_table_allocated], 0,
		  (n_alloc - abbrev_die_table_allocated) * sizeof (dw_die_ref));
	  abbrev_die_table_allocated = n_alloc;
	}

      ++abbrev_die_table_in_use;
      abbrev_die_table[abbrev_id] = die;
    }

  die->die_abbrev = abbrev_id;
  for (c = die->die_child; c != NULL; c = c->die_sib)
    build_abbrev_table (c);
}
/* Return the power-of-two number of bytes (1, 2, 4 or 8) necessary to
   represent VALUE.  Zero still needs one byte.

   The previous implementation derived this with two rounds of
   floor_log2, silently relying on floor_log2 (0) == -1; the explicit
   range tests below are exactly equivalent for every input and easier
   to verify.  */
static int
constant_size (value)
     long unsigned value;
{
  if (value <= 0xff)
    return 1;
  if (value <= 0xffff)
    return 2;
  if (value <= 0xffffffff)
    return 4;
  /* Only reachable on hosts where unsigned long is wider than 32 bits,
     matching the old computation's behavior.  */
  return 8;
}
/* Return the size of a DIE as it is represented in the
   .debug_info section.  The per-class sizes here must stay in lock-step
   with the forms chosen by value_format and the bytes emitted by
   output_die.  */
static unsigned long
size_of_die (die)
     dw_die_ref die;
{
  unsigned long size = 0;
  dw_attr_ref a;

  /* Every DIE begins with its uleb128 abbreviation code.  */
  size += size_of_uleb128 (die->die_abbrev);
  for (a = die->die_attr; a != NULL; a = a->dw_attr_next)
    {
      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_offset:
	  size += DWARF_OFFSET_SIZE;
	  break;
	case dw_val_class_loc:
	  {
	    unsigned long lsize = size_of_locs (AT_loc (a));

	    /* Block length.  */
	    size += constant_size (lsize);
	    size += lsize;
	  }
	  break;
	case dw_val_class_loc_list:
	  size += DWARF_OFFSET_SIZE;
	  break;
	case dw_val_class_range_list:
	  size += DWARF_OFFSET_SIZE;
	  break;
	case dw_val_class_const:
	  size += size_of_sleb128 (AT_int (a));
	  break;
	case dw_val_class_unsigned_const:
	  size += constant_size (AT_unsigned (a));
	  break;
	case dw_val_class_long_long:
	  size += 1 + 2*HOST_BITS_PER_LONG/HOST_BITS_PER_CHAR; /* block */
	  break;
	case dw_val_class_float:
	  size += 1 + a->dw_attr_val.v.val_float.length * 4; /* block */
	  break;
	case dw_val_class_flag:
	  size += 1;
	  break;
	case dw_val_class_die_ref:
	  size += DWARF_OFFSET_SIZE;
	  break;
	case dw_val_class_fde_ref:
	  size += DWARF_OFFSET_SIZE;
	  break;
	case dw_val_class_lbl_id:
	  size += DWARF2_ADDR_SIZE;
	  break;
	case dw_val_class_lbl_offset:
	  size += DWARF_OFFSET_SIZE;
	  break;
	case dw_val_class_str:
	  /* Either an offset into .debug_str (DW_FORM_strp) or the
	     string inlined with its terminating NUL.  */
	  if (AT_string_form (a) == DW_FORM_strp)
	    size += DWARF_OFFSET_SIZE;
	  else
	    size += HT_LEN (&a->dw_attr_val.v.val_str->id) + 1;
	  break;
	default:
	  abort ();
	}
    }

  return size;
}
/* Assign .debug_info offsets to DIE and all of its children, using and
   advancing the global next_die_offset by each DIE's encoded size.  */
static void
calc_die_sizes (die)
     dw_die_ref die;
{
  dw_die_ref child;

  die->die_offset = next_die_offset;
  next_die_offset += size_of_die (die);

  for (child = die->die_child; child != NULL; child = child->die_sib)
    calc_die_sizes (child);

  /* Count the null byte used to terminate sibling lists.  */
  if (die->die_child != NULL)
    next_die_offset += 1;
}
/* Set the mark on DIE and every descendant.  The marks record which
   DIEs belong to the current CU, so references to them need not use
   FORM_ref_addr.  A DIE that is already marked indicates a caller bug,
   hence the abort.  */
static void
mark_dies (die)
     dw_die_ref die;
{
  dw_die_ref child;

  if (die->die_mark)
    abort ();
  die->die_mark = 1;

  for (child = die->die_child; child; child = child->die_sib)
    mark_dies (child);
}
/* Clear the mark on DIE and every descendant; the inverse of
   mark_dies.  An already-clear DIE indicates a caller bug.  */
static void
unmark_dies (die)
     dw_die_ref die;
{
  dw_die_ref child;

  if (!die->die_mark)
    abort ();
  die->die_mark = 0;

  for (child = die->die_child; child; child = child->die_sib)
    unmark_dies (child);
}
/* Clear the mark on DIE, its descendants, and every DIE reachable
   through die_ref attributes.  Subtrees that are already clear are
   skipped, which also terminates cycles among references.  */
static void
unmark_all_dies (die)
     dw_die_ref die;
{
  dw_die_ref child;
  dw_attr_ref attr;

  if (!die->die_mark)
    return;
  die->die_mark = 0;

  for (child = die->die_child; child; child = child->die_sib)
    unmark_all_dies (child);

  for (attr = die->die_attr; attr; attr = attr->dw_attr_next)
    if (AT_class (attr) == dw_val_class_die_ref)
      unmark_all_dies (AT_ref (attr));
}
/* Return the size of the .debug_pubnames table generated for the
   compilation unit.  */
static unsigned long
size_of_pubnames ()
{
  unsigned long size = DWARF_PUBNAMES_HEADER_SIZE;
  unsigned i;

  /* Each entry is a DIE offset plus the NUL-terminated name.  */
  for (i = 0; i < pubname_table_in_use; i++)
    size += DWARF_OFFSET_SIZE + strlen (pubname_table[i].name) + 1;

  /* The table ends with a zero offset word.  */
  return size + DWARF_OFFSET_SIZE;
}
/* Return the size of the information in the .debug_aranges section.  */
static unsigned long
size_of_aranges ()
{
  unsigned long size = DWARF_ARANGES_HEADER_SIZE;

  /* One address/length pair for the compilation unit itself, one pair
     per recorded arange, and the two zero words that terminate the
     address range table.  */
  size += 2 * DWARF2_ADDR_SIZE;
  size += 2 * DWARF2_ADDR_SIZE * arange_table_in_use;
  size += 2 * DWARF2_ADDR_SIZE;

  return size;
}
/* Select the encoding of an attribute value.  The form chosen here
   determines both the bytes counted by size_of_die and those emitted by
   output_die, so all three must agree.  */
static enum dwarf_form
value_format (a)
     dw_attr_ref a;
{
  switch (a->dw_attr_val.val_class)
    {
    case dw_val_class_addr:
      return DW_FORM_addr;
    case dw_val_class_range_list:
    case dw_val_class_offset:
      if (DWARF_OFFSET_SIZE == 4)
	return DW_FORM_data4;
      if (DWARF_OFFSET_SIZE == 8)
	return DW_FORM_data8;
      abort ();
    case dw_val_class_loc_list:
      /* FIXME: Could be DW_FORM_data8, with a > 32 bit size
	 .debug_loc section */
      return DW_FORM_data4;
    case dw_val_class_loc:
      /* Pick the smallest block form that can hold the expression's
	 length; constant_size only ever returns 1 or 2 here because
	 output_loc_list aborts on expressions over 0xffff bytes.  */
      switch (constant_size (size_of_locs (AT_loc (a))))
	{
	case 1:
	  return DW_FORM_block1;
	case 2:
	  return DW_FORM_block2;
	default:
	  abort ();
	}
    case dw_val_class_const:
      return DW_FORM_sdata;
    case dw_val_class_unsigned_const:
      /* Smallest fixed-size data form that fits the value.  */
      switch (constant_size (AT_unsigned (a)))
	{
	case 1:
	  return DW_FORM_data1;
	case 2:
	  return DW_FORM_data2;
	case 4:
	  return DW_FORM_data4;
	case 8:
	  return DW_FORM_data8;
	default:
	  abort ();
	}
    case dw_val_class_long_long:
      return DW_FORM_block1;
    case dw_val_class_float:
      return DW_FORM_block1;
    case dw_val_class_flag:
      return DW_FORM_flag;
    case dw_val_class_die_ref:
      /* Cross-CU references need an address-sized form.  */
      if (AT_ref_external (a))
	return DW_FORM_ref_addr;
      else
	return DW_FORM_ref;
    case dw_val_class_fde_ref:
      return DW_FORM_data;
    case dw_val_class_lbl_id:
      return DW_FORM_addr;
    case dw_val_class_lbl_offset:
      return DW_FORM_data;
    case dw_val_class_str:
      return AT_string_form (a);
    default:
      abort ();
    }
}
/* Output the encoding of an attribute value. */
static void
output_value_format (a)
dw_attr_ref a;
{
enum dwarf_form form = value_format (a);
dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
}
/* Output the .debug_abbrev section which defines the DIE abbreviation
   table.  */
static void
output_abbrev_section ()
{
  unsigned long abbrev_id;

  for (abbrev_id = 1; abbrev_id < abbrev_die_table_in_use; ++abbrev_id)
    {
      dw_die_ref abbrev = abbrev_die_table[abbrev_id];
      dw_attr_ref attr;

      dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
      dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
				   dwarf_tag_name (abbrev->die_tag));

      /* Flag whether DIEs using this abbreviation have children.  */
      if (abbrev->die_child != NULL)
	dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
      else
	dw2_asm_output_data (1, DW_children_no, "DW_children_no");

      /* Emit each attribute's name code and form code ...  */
      for (attr = abbrev->die_attr; attr != NULL; attr = attr->dw_attr_next)
	{
	  dw2_asm_output_data_uleb128 (attr->dw_attr, "(%s)",
				       dwarf_attr_name (attr->dw_attr));
	  output_value_format (attr);
	}

      /* ... followed by the 0,0 pair ending the attribute list.  */
      dw2_asm_output_data (1, 0, NULL);
      dw2_asm_output_data (1, 0, NULL);
    }

  /* Terminate the table.  */
  dw2_asm_output_data (1, 0, NULL);
}
/* Output a symbol we can use to refer to this DIE from another CU.
   Does nothing for DIEs without a symbol.  */
static inline void
output_die_symbol (die)
     dw_die_ref die;
{
  char *sym = die->die_symbol;

  if (sym == 0)
    return;

  /* Only compiler-generated DIE labels (those carrying
     DIE_LABEL_PREFIX) are globalized; other symbols are emitted
     as plain labels.  */
  if (strncmp (sym, DIE_LABEL_PREFIX, sizeof (DIE_LABEL_PREFIX) - 1) == 0)
    /* We make these global, not weak; if the target doesn't support
       .linkonce, it doesn't support combining the sections, so debugging
       will break.  */
    (*targetm.asm_out.globalize_label) (asm_out_file, sym);

  ASM_OUTPUT_LABEL (asm_out_file, sym);
}
/* Return a new location list node covering the range BEGIN..END in
   SECTION, holding the expression EXPR.  When GENSYM is nonzero a new
   internal symbol is generated for the node; that is done for the head
   of a list only.  */
static inline dw_loc_list_ref
new_loc_list (expr, begin, end, section, gensym)
     dw_loc_descr_ref expr;
     const char *begin;
     const char *end;
     const char *section;
     unsigned gensym;
{
  dw_loc_list_ref node
    = (dw_loc_list_ref) xcalloc (1, sizeof (dw_loc_list_node));

  node->begin = begin;
  node->end = end;
  node->expr = expr;
  node->section = section;
  if (gensym)
    node->ll_symbol = gen_internal_sym ("LLST");

  return node;
}
/* Append a location description expression DESCR, covering BEGIN..END
   in SECTION, to the end of location list *LIST_HEAD.  */
static inline void
add_loc_descr_to_loc_list (list_head, descr, begin, end, section)
     dw_loc_list_ref *list_head;
     dw_loc_descr_ref descr;
     const char *begin;
     const char *end;
     const char *section;
{
  dw_loc_list_ref *tail = list_head;

  /* Find the end of the chain.  */
  while (*tail != NULL)
    tail = &(*tail)->dw_loc_next;

  /* Link a new node there; only list heads get a symbol, hence 0.  */
  *tail = new_loc_list (descr, begin, end, section, 0);
}
/* Output the location list given to us.  Each entry is emitted as a
   begin/end address pair followed by a 2-byte length and the location
   expression; a 0,0 pair terminates the list.  */
static void
output_loc_list (list_head)
     dw_loc_list_ref list_head;
{
  dw_loc_list_ref curr = list_head;

  /* The head's symbol is how DIE attributes refer to this list.  */
  ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);

  /* ??? This shouldn't be needed now that we've forced the
     compilation unit base address to zero when there is code
     in more than one section.  */
  if (strcmp (curr->section, ".text") == 0)
    {
      /* dw2_asm_output_data will mask off any extra bits in the ~0.  */
      dw2_asm_output_data (DWARF2_ADDR_SIZE, ~(unsigned HOST_WIDE_INT) 0,
			   "Location list base address specifier fake entry");
      dw2_asm_output_offset (DWARF2_ADDR_SIZE, curr->section,
			     "Location list base address specifier base");
    }

  for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
    {
      unsigned long size;

      /* Begin/end addresses are deltas against the entry's section.  */
      dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
			    "Location list begin address (%s)",
			    list_head->ll_symbol);
      dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
			    "Location list end address (%s)",
			    list_head->ll_symbol);
      size = size_of_locs (curr->expr);

      /* Output the block length for this list of location operations.
	 The length field is fixed at 2 bytes, so larger expressions
	 cannot be represented.  */
      if (size > 0xffff)
	abort ();

      dw2_asm_output_data (2, size, "%s", "Location expression size");
      output_loc_sequence (curr->expr);
    }

  dw2_asm_output_data (DWARF_OFFSET_SIZE, 0,
		       "Location list terminator begin (%s)",
		       list_head->ll_symbol);
  dw2_asm_output_data (DWARF_OFFSET_SIZE, 0,
		       "Location list terminator end (%s)",
		       list_head->ll_symbol);
}
/* Output the DIE and its attributes.  Called recursively to generate
   the definitions of each child DIE.  The byte layout emitted here must
   match what size_of_die counted and what value_format selected.  */
static void
output_die (die)
     dw_die_ref die;
{
  dw_attr_ref a;
  dw_die_ref c;
  unsigned long size;

  /* If someone in another CU might refer to us, set up a symbol for
     them to point to.  */
  if (die->die_symbol)
    output_die_symbol (die);

  dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (0x%lx) %s)",
			       die->die_offset, dwarf_tag_name (die->die_tag));

  for (a = die->die_attr; a != NULL; a = a->dw_attr_next)
    {
      const char *name = dwarf_attr_name (a->dw_attr);

      switch (AT_class (a))
	{
	case dw_val_class_addr:
	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
	  break;

	case dw_val_class_offset:
	  dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
			       "%s", name);
	  break;

	case dw_val_class_range_list:
	  {
	    /* Temporarily append "+0x<offset>" to the section label so
	       the reference points into .debug_ranges, then restore it.  */
	    char *p = strchr (ranges_section_label, '\0');

	    sprintf (p, "+0x%lx", a->dw_attr_val.v.val_offset);
	    dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
				   "%s", name);
	    *p = '\0';
	  }
	  break;

	case dw_val_class_loc:
	  size = size_of_locs (AT_loc (a));

	  /* Output the block length for this list of location operations.  */
	  dw2_asm_output_data (constant_size (size), size, "%s", name);

	  output_loc_sequence (AT_loc (a));
	  break;

	case dw_val_class_const:
	  /* ??? It would be slightly more efficient to use a scheme like is
	     used for unsigned constants below, but gdb 4.x does not sign
	     extend.  Gdb 5.x does sign extend.  */
	  dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
	  break;

	case dw_val_class_unsigned_const:
	  dw2_asm_output_data (constant_size (AT_unsigned (a)),
			       AT_unsigned (a), "%s", name);
	  break;

	case dw_val_class_long_long:
	  {
	    unsigned HOST_WIDE_INT first, second;

	    /* Emitted as a block: length byte, then the two halves in
	       target word order.  */
	    dw2_asm_output_data (1,
				 2 * HOST_BITS_PER_LONG / HOST_BITS_PER_CHAR,
				 "%s", name);

	    if (WORDS_BIG_ENDIAN)
	      {
		first = a->dw_attr_val.v.val_long_long.hi;
		second = a->dw_attr_val.v.val_long_long.low;
	      }
	    else
	      {
		first = a->dw_attr_val.v.val_long_long.low;
		second = a->dw_attr_val.v.val_long_long.hi;
	      }

	    dw2_asm_output_data (HOST_BITS_PER_LONG / HOST_BITS_PER_CHAR,
				 first, "long long constant");
	    dw2_asm_output_data (HOST_BITS_PER_LONG / HOST_BITS_PER_CHAR,
				 second, NULL);
	  }
	  break;

	case dw_val_class_float:
	  {
	    unsigned int i;

	    /* Emitted as a block: length byte, then 4-byte words.  */
	    dw2_asm_output_data (1, a->dw_attr_val.v.val_float.length * 4,
				 "%s", name);

	    for (i = 0; i < a->dw_attr_val.v.val_float.length; i++)
	      dw2_asm_output_data (4, a->dw_attr_val.v.val_float.array[i],
				   "fp constant word %u", i);
	    break;
	  }

	case dw_val_class_flag:
	  dw2_asm_output_data (1, AT_flag (a), "%s", name);
	  break;

	case dw_val_class_loc_list:
	  {
	    char *sym = AT_loc_list (a)->ll_symbol;

	    /* The list head must have been given a symbol.  */
	    if (sym == 0)
	      abort ();
	    dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym,
				  loc_section_label, "%s", name);
	  }
	  break;

	case dw_val_class_die_ref:
	  if (AT_ref_external (a))
	    {
	      /* Cross-CU reference: emit the target DIE's symbol.  */
	      char *sym = AT_ref (a)->die_symbol;

	      if (sym == 0)
		abort ();
	      dw2_asm_output_offset (DWARF2_ADDR_SIZE, sym, "%s", name);
	    }
	  else if (AT_ref (a)->die_offset == 0)
	    abort ();
	  else
	    /* Intra-CU reference: emit the target DIE's offset.  */
	    dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
				 "%s", name);
	  break;

	case dw_val_class_fde_ref:
	  {
	    char l1[20];

	    ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
					 a->dw_attr_val.v.val_fde_index * 2);
	    dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, "%s", name);
	  }
	  break;

	case dw_val_class_lbl_id:
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
	  break;

	case dw_val_class_lbl_offset:
	  dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a), "%s", name);
	  break;

	case dw_val_class_str:
	  if (AT_string_form (a) == DW_FORM_strp)
	    dw2_asm_output_offset (DWARF_OFFSET_SIZE,
				   a->dw_attr_val.v.val_str->label,
				   "%s: \"%s\"", name, AT_string (a));
	  else
	    dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
	  break;

	default:
	  abort ();
	}
    }

  for (c = die->die_child; c != NULL; c = c->die_sib)
    output_die (c);

  /* Add null byte to terminate sibling list.  */
  if (die->die_child != NULL)
    dw2_asm_output_data (1, 0, "end of children of DIE 0x%lx",
			 die->die_offset);
}
/* Output the compilation unit header that appears at the beginning of the
   .debug_info section, and precedes the DIE descriptions.  */
static void
output_compilation_unit_header ()
{
  /* The length field covers everything after itself, so subtract its
     own size from the total computed by calc_die_sizes.  */
  dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset - DWARF_OFFSET_SIZE,
		       "Length of Compilation Unit Info");
  dw2_asm_output_data (2, DWARF_VERSION, "DWARF version number");
  dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
			 "Offset Into Abbrev. Section");
  dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
}
/* Output the compilation unit DIE and its children.  OUTPUT_IF_EMPTY
   forces emission even for a childless DIE (used for the main CU).  */
static void
output_comp_unit (die, output_if_empty)
     dw_die_ref die;
     int output_if_empty;
{
  const char *secname;
  char *oldsym, *tmp;

  /* Unless we are outputting main CU, we may throw away empty ones.  */
  if (!output_if_empty && die->die_child == NULL)
    return;

  /* Even if there are no children of this DIE, we must output the information
     about the compilation unit.  Otherwise, on an empty translation unit, we
     will generate a present, but empty, .debug_info section.  IRIX 6.5 `nm'
     will then complain when examining the file.  First mark all the DIEs in
     this CU so we know which get local refs.  */
  mark_dies (die);

  build_abbrev_table (die);

  /* Initialize the beginning DIE offset - and calculate sizes/offsets.  */
  next_die_offset = DWARF_COMPILE_UNIT_HEADER_SIZE;
  calc_die_sizes (die);

  oldsym = die->die_symbol;
  if (oldsym)
    {
      /* Secondary CU: emit into a .gnu.linkonce section named after the
	 CU's symbol, and clear die_symbol so output_die doesn't emit a
	 label for the CU DIE itself.  */
      tmp = (char *) alloca (strlen (oldsym) + 24);

      sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
      secname = tmp;
      die->die_symbol = NULL;
    }
  else
    secname = (const char *) DEBUG_INFO_SECTION;

  /* Output debugging information.  */
  named_section_flags (secname, SECTION_DEBUG);
  output_compilation_unit_header ();
  output_die (die);

  /* Leave the marks on the main CU, so we can check them in
     output_pubnames.  */
  if (oldsym)
    {
      unmark_dies (die);
      die->die_symbol = oldsym;
    }
}
/* The DWARF2 pubname for a nested thingy looks like "A::f".  The
   output of lang_hooks.decl_printable_name for C++ looks like
   "A::f(int)".  Let's drop the argument list, and maybe the scope.  */
static const char *
dwarf2_name (decl, scope)
     tree decl;
     int scope;
{
  /* Verbosity 1 keeps the scope prefix, 0 drops it.  */
  return (*lang_hooks.decl_printable_name) (decl, scope != 0);
}
/* Add a new entry to .debug_pubnames if appropriate: record DIE under
   DECL's printable name.  Non-public decls are skipped.  */
static void
add_pubname (decl, die)
     tree decl;
     dw_die_ref die;
{
  pubname_ref entry;

  if (! TREE_PUBLIC (decl))
    return;

  /* Grow the table when it is full.  */
  if (pubname_table_in_use == pubname_table_allocated)
    {
      pubname_table_allocated += PUBNAME_TABLE_INCREMENT;
      pubname_table
	= (pubname_ref) xrealloc (pubname_table,
				  (pubname_table_allocated
				   * sizeof (pubname_entry)));
    }

  entry = &pubname_table[pubname_table_in_use++];
  entry->die = die;
  entry->name = xstrdup (dwarf2_name (decl, 1));
}
/* Output the public names table used to speed up access to externally
   visible names.  For now, only generate entries for externally
   visible procedures.  The layout must match size_of_pubnames.  */
static void
output_pubnames ()
{
  unsigned i;
  unsigned long pubnames_length = size_of_pubnames ();

  dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
		       "Length of Public Names Info");
  dw2_asm_output_data (2, DWARF_VERSION, "DWARF Version");
  dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
			 "Offset of Compilation Unit Info");
  dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
		       "Compilation Unit Length");

  for (i = 0; i < pubname_table_in_use; i++)
    {
      pubname_ref pub = &pubname_table[i];

      /* We shouldn't see pubnames for DIEs outside of the main CU.  */
      if (pub->die->die_mark == 0)
	abort ();

      dw2_asm_output_data (DWARF_OFFSET_SIZE, pub->die->die_offset,
			   "DIE offset");

      dw2_asm_output_nstring (pub->name, -1, "external name");
    }

  /* A zero offset terminates the table.  */
  dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
}
/* Add a new entry to .debug_aranges if appropriate: record DIE for
   DECL.  Decls without an explicit section name are skipped.  */
static void
add_arange (decl, die)
     tree decl;
     dw_die_ref die;
{
  if (! DECL_SECTION_NAME (decl))
    return;

  /* Grow the table when it is full.  */
  if (arange_table_in_use == arange_table_allocated)
    {
      arange_table_allocated += ARANGE_TABLE_INCREMENT;
      arange_table = (dw_die_ref *)
	xrealloc (arange_table, arange_table_allocated * sizeof (dw_die_ref));
    }

  arange_table[arange_table_in_use++] = die;
}
/* Output the information that goes into the .debug_aranges table.
   Namely, define the beginning and ending address range of the
   text section generated for this compilation unit.  The layout must
   match size_of_aranges.  */
static void
output_aranges ()
{
  unsigned i;
  unsigned long aranges_length = size_of_aranges ();

  dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
		       "Length of Address Ranges Info");
  dw2_asm_output_data (2, DWARF_VERSION, "DWARF Version");
  dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
			 "Offset of Compilation Unit Info");
  dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
  dw2_asm_output_data (1, 0, "Size of Segment Descriptor");

  /* We need to align to twice the pointer size here.  */
  if (DWARF_ARANGES_PAD_SIZE)
    {
      /* Pad using a 2 byte words so that padding is correct for any
	 pointer size.  */
      dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
			   2 * DWARF2_ADDR_SIZE);
      for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
	dw2_asm_output_data (2, 0, NULL);
    }

  /* The first pair covers the whole text section of this CU.  */
  dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
  dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
			text_section_label, "Length");

  for (i = 0; i < arange_table_in_use; i++)
    {
      dw_die_ref die = arange_table[i];

      /* We shouldn't see aranges for DIEs outside of the main CU.  */
      if (die->die_mark == 0)
	abort ();

      if (die->die_tag == DW_TAG_subprogram)
	{
	  /* Function: range from its low pc to its high pc.  */
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, get_AT_low_pc (die),
			       "Address");
	  dw2_asm_output_delta (DWARF2_ADDR_SIZE, get_AT_hi_pc (die),
				get_AT_low_pc (die), "Length");
	}
      else
	{
	  /* A static variable; extract the symbol from DW_AT_location.
	     Note that this code isn't currently hit, as we only emit
	     aranges for functions (jason 9/23/99).  */
	  dw_attr_ref a = get_AT (die, DW_AT_location);
	  dw_loc_descr_ref loc;

	  if (! a || AT_class (a) != dw_val_class_loc)
	    abort ();

	  loc = AT_loc (a);
	  if (loc->dw_loc_opc != DW_OP_addr)
	    abort ();

	  dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE,
				   loc->dw_loc_oprnd1.v.val_addr, "Address");
	  dw2_asm_output_data (DWARF2_ADDR_SIZE,
			       get_AT_unsigned (die, DW_AT_byte_size),
			       "Length");
	}
    }

  /* Output the terminator words.  */
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
}
/* Add a new entry to .debug_ranges for BLOCK.  Return the byte offset
   within the section at which it was placed.  A NULL BLOCK records a
   zero block number, which output_ranges emits as a list terminator.  */
static unsigned int
add_ranges (block)
     tree block;
{
  unsigned int slot = ranges_table_in_use;

  /* Grow the table when it is full.  */
  if (slot == ranges_table_allocated)
    {
      ranges_table_allocated += RANGES_TABLE_INCREMENT;
      ranges_table = (dw_ranges_ref)
	xrealloc (ranges_table, (ranges_table_allocated
				 * sizeof (struct dw_ranges_struct)));
    }

  ranges_table[slot].block_num = (block ? BLOCK_NUMBER (block) : 0);
  ranges_table_in_use = slot + 1;

  /* Each entry occupies one begin/end address pair.  */
  return slot * 2 * DWARF2_ADDR_SIZE;
}
/* Emit the .debug_ranges section from ranges_table: a begin/end address
   pair per recorded block, with zero entries acting as terminators.  */
static void
output_ranges ()
{
  unsigned i;
  static const char *const start_fmt = "Offset 0x%x";
  const char *fmt = start_fmt;	/* Annotate only a list's first entry.  */

  for (i = 0; i < ranges_table_in_use; i++)
    {
      int block_num = ranges_table[i].block_num;

      if (block_num)
	{
	  char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
	  char elabel[MAX_ARTIFICIAL_LABEL_BYTES];

	  ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
	  ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);

	  /* If all code is in the text section, then the compilation
	     unit base address defaults to DW_AT_low_pc, which is the
	     base of the text section.  */
	  if (separate_line_info_table_in_use == 0)
	    {
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
				    text_section_label,
				    fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
				    text_section_label, NULL);
	    }

	  /* Otherwise, we add a DW_AT_entry_pc attribute to force the
	     compilation unit base address to zero, which allows us to
	     use absolute addresses, and not worry about whether the
	     target supports cross-section arithmetic.  */
	  else
	    {
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
				   fmt, i * 2 * DWARF2_ADDR_SIZE);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
	    }

	  fmt = NULL;
	}
      else
	{
	  /* Zero entry: emit the 0,0 terminator pair and reset the
	     annotation for the next list.  */
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
	  fmt = start_fmt;
	}
    }
}
/* Data structure containing information about input files. */
struct file_info
{
char *path; /* Complete file name. */
char *fname; /* File name part. */
int length; /* Length of entire string. */
int file_idx; /* Index in input file table. */
int dir_idx; /* Index in directory table. */
};
/* Data structure containing information about directories with source
   files, used by output_file_names below.  */
struct dir_info
{
  char *path;		/* Path including directory name.  */
  int length;		/* Path length.  */
  int prefix;		/* Index of directory entry which is a prefix.  */
  int count;		/* Number of files in this directory.  */
  int dir_idx;		/* Index of directory used as base.  */
  int used;		/* Used in the end?  (Overwritten with the emitted
			   directory index once known.)  */
};
/* Callback function for file_info comparison.  We sort by looking at
   the directories in the path.  */
static int
file_info_cmp (p1, p2)
     const void *p1;
     const void *p2;
{
  const struct file_info *s1 = p1;
  const struct file_info *s2 = p2;
  unsigned char *cp1;
  unsigned char *cp2;

  /* Take care of file names without directories.  We need to make sure that
     we return consistent values to qsort since some will get confused if
     we return the same value when identical operands are passed in opposite
     orders.  So if neither has a directory, return 0 and otherwise return
     1 or -1 depending on which one has the directory.  */
  if ((s1->path == s1->fname || s2->path == s2->fname))
    return (s2->path == s2->fname) - (s1->path == s1->fname);

  cp1 = (unsigned char *) s1->path;
  cp2 = (unsigned char *) s2->path;

  while (1)
    {
      /* NOTE(review): the increments precede the tests below, so the
	 first character of each path is never compared — presumably
	 acceptable since qsort only needs a consistent ordering, but
	 worth confirming.  */
      ++cp1;
      ++cp2;

      /* Reached the end of the first path?  If so, handle like above.  */
      if ((cp1 == (unsigned char *) s1->fname)
	  || (cp2 == (unsigned char *) s2->fname))
	return ((cp2 == (unsigned char *) s2->fname)
		- (cp1 == (unsigned char *) s1->fname));

      /* Character of current path component the same?  */
      else if (*cp1 != *cp2)
	return *cp1 - *cp2;
    }
}
/* Output the directory table and the file name table.  We try to minimize
   the total amount of memory needed.  A heuristic is used to avoid large
   slowdowns with many input files.  */
static void
output_file_names ()
{
  struct file_info *files;
  struct dir_info *dirs;
  int *saved;
  int *savehere;
  int *backmap;
  int ndirs;
  int idx_offset;
  int i;
  int idx;

  /* Allocate the various arrays we need.  */
  files = (struct file_info *) alloca (file_table.in_use
				       * sizeof (struct file_info));
  dirs = (struct dir_info *) alloca (file_table.in_use
				     * sizeof (struct dir_info));

  /* Sort the file names.  Entry 0 of file_table is skipped throughout.  */
  for (i = 1; i < (int) file_table.in_use; i++)
    {
      char *f;

      /* Skip all leading "./".  */
      f = file_table.table[i];
      while (f[0] == '.' && f[1] == '/')
	f += 2;

      /* Create a new array entry.  */
      files[i].path = f;
      files[i].length = strlen (f);
      files[i].file_idx = i;

      /* Search for the file name part.  */
      f = strrchr (f, '/');
      files[i].fname = f == NULL ? files[i].path : f + 1;
    }

  qsort (files + 1, file_table.in_use - 1, sizeof (files[0]), file_info_cmp);

  /* Find all the different directories used.  Sorting above put files
     from the same directory next to each other, so one linear scan
     suffices.  */
  dirs[0].path = files[1].path;
  dirs[0].length = files[1].fname - files[1].path;
  dirs[0].prefix = -1;
  dirs[0].count = 1;
  dirs[0].dir_idx = 0;
  dirs[0].used = 0;
  files[1].dir_idx = 0;
  ndirs = 1;

  for (i = 2; i < (int) file_table.in_use; i++)
    if (files[i].fname - files[i].path == dirs[ndirs - 1].length
	&& memcmp (dirs[ndirs - 1].path, files[i].path,
		   dirs[ndirs - 1].length) == 0)
      {
	/* Same directory as last entry.  */
	files[i].dir_idx = ndirs - 1;
	++dirs[ndirs - 1].count;
      }
    else
      {
	int j;

	/* This is a new directory.  */
	dirs[ndirs].path = files[i].path;
	dirs[ndirs].length = files[i].fname - files[i].path;
	dirs[ndirs].count = 1;
	dirs[ndirs].dir_idx = ndirs;
	dirs[ndirs].used = 0;
	files[i].dir_idx = ndirs;

	/* Search for a prefix.  The longest previously-seen proper
	   prefix wins.  */
	dirs[ndirs].prefix = -1;
	for (j = 0; j < ndirs; j++)
	  if (dirs[j].length < dirs[ndirs].length
	      && dirs[j].length > 1
	      && (dirs[ndirs].prefix == -1
		  || dirs[j].length > dirs[dirs[ndirs].prefix].length)
	      && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
	    dirs[ndirs].prefix = j;

	++ndirs;
      }

  /* Now to the actual work.  We have to find a subset of the directories which
     allow expressing the file name using references to the directory table
     with the least amount of characters.  We do not do an exhaustive search
     where we would have to check out every combination of every single
     possible prefix.  Instead we use a heuristic which provides nearly optimal
     results in most cases and never is much off.  */
  saved = (int *) alloca (ndirs * sizeof (int));
  savehere = (int *) alloca (ndirs * sizeof (int));

  memset (saved, '\0', ndirs * sizeof (saved[0]));
  for (i = 0; i < ndirs; i++)
    {
      int j;
      int total;

      /* We can always save some space for the current directory.  But this
	 does not mean it will be enough to justify adding the directory.  */
      savehere[i] = dirs[i].length;
      total = (savehere[i] - saved[i]) * dirs[i].count;

      for (j = i + 1; j < ndirs; j++)
	{
	  savehere[j] = 0;
	  if (saved[j] < dirs[i].length)
	    {
	      /* Determine whether the dirs[i] path is a prefix of the
		 dirs[j] path.  Follow the prefix chain from dirs[j].  */
	      int k;

	      k = dirs[j].prefix;
	      while (k != -1 && k != i)
		k = dirs[k].prefix;

	      if (k == i)
		{
		  /* Yes it is.  We can possibly safe some memory but
		     writing the filenames in dirs[j] relative to
		     dirs[i].  */
		  savehere[j] = dirs[i].length;
		  total += (savehere[j] - saved[j]) * dirs[j].count;
		}
	    }
	}

      /* Check whether we can safe enough to justify adding the dirs[i]
	 directory.  */
      if (total > dirs[i].length + 1)
	{
	  /* It's worthwhile adding.  */
	  for (j = i; j < ndirs; j++)
	    if (savehere[j] > 0)
	      {
		/* Remember how much we saved for this directory so far.  */
		saved[j] = savehere[j];

		/* Remember the prefix directory.  */
		dirs[j].dir_idx = i;
	      }
	}
    }

  /* We have to emit them in the order they appear in the file_table array
     since the index is used in the debug info generation.  To do this
     efficiently we generate a back-mapping of the indices first.  */
  backmap = (int *) alloca (file_table.in_use * sizeof (int));
  for (i = 1; i < (int) file_table.in_use; i++)
    {
      backmap[files[i].file_idx] = i;

      /* Mark this directory as used.  */
      dirs[dirs[files[i].dir_idx].dir_idx].used = 1;
    }

  /* That was it.  We are ready to emit the information.  First emit the
     directory name table.  We have to make sure the first actually emitted
     directory name has index one; zero is reserved for the current working
     directory.  Make sure we do not confuse these indices with the one for the
     constructed table (even though most of the time they are identical).  */
  idx = 1;
  idx_offset = dirs[0].length > 0 ? 1 : 0;
  for (i = 1 - idx_offset; i < ndirs; i++)
    if (dirs[i].used != 0)
      {
	/* USED is reused here to hold the emitted 1-based index.  The
	   length - 1 drops the trailing '/' from the stored path.  */
	dirs[i].used = idx++;
	dw2_asm_output_nstring (dirs[i].path, dirs[i].length - 1,
				"Directory Entry: 0x%x", dirs[i].used);
      }

  dw2_asm_output_data (1, 0, "End directory table");

  /* Correct the index for the current working directory entry if it
     exists.  */
  if (idx_offset == 0)
    dirs[0].used = 0;

  /* Now write all the file names.  Each is emitted relative to its base
     directory by skipping that directory's path prefix.  */
  for (i = 1; i < (int) file_table.in_use; i++)
    {
      int file_idx = backmap[i];
      int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;

      dw2_asm_output_nstring (files[file_idx].path + dirs[dir_idx].length, -1,
			      "File Entry: 0x%x", i);

      /* Include directory index.  */
      dw2_asm_output_data_uleb128 (dirs[dir_idx].used, NULL);

      /* Modification time.  */
      dw2_asm_output_data_uleb128 (0, NULL);

      /* File length in bytes.  */
      dw2_asm_output_data_uleb128 (0, NULL);
    }

  dw2_asm_output_data (1, 0, "End file name table");
}
/* Output the source line number correspondence information.  This
   information goes into the .debug_line section.

   Emits the statement-program prologue (version, opcode table, file/dir
   tables via output_file_names), then two line-number programs: one for
   the main text section (line_info_table) and one for code placed in
   separate sections, e.g. per-function sections (separate_line_info_table),
   ending each with DW_LNE_end_sequence.  */

static void
output_line_info ()
{
  /* l1/l2 bracket the whole section contribution; p1/p2 bracket the
     prologue so its length can be emitted as a label difference.  */
  char l1[20], l2[20], p1[20], p2[20];
  char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
  char prev_line_label[MAX_ARTIFICIAL_LABEL_BYTES];
  unsigned opc;
  unsigned n_op_args;
  unsigned long lt_index;
  unsigned long current_line;
  long line_offset;
  long line_delta;
  unsigned long current_file;
  unsigned long function;

  ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, 0);

  dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
			"Length of Source Line Info");
  ASM_OUTPUT_LABEL (asm_out_file, l1);

  dw2_asm_output_data (2, DWARF_VERSION, "DWARF Version");

  dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
  ASM_OUTPUT_LABEL (asm_out_file, p1);

  /* Define the architecture-dependent minimum instruction length (in
     bytes).  In this implementation of DWARF, this field is used for
     information purposes only.  Since GCC generates assembly language,
     we have no a priori knowledge of how many instruction bytes are
     generated for each source line, and therefore can use only the
     DW_LNE_set_address and DW_LNS_fixed_advance_pc line information
     commands.  Accordingly, we fix this as `1', which is "correct
     enough" for all architectures, and don't let the target override.  */
  dw2_asm_output_data (1, 1,
		       "Minimum Instruction Length");

  dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
		       "Default is_stmt_start flag");
  dw2_asm_output_data (1, DWARF_LINE_BASE,
		       "Line Base Value (Special Opcodes)");
  dw2_asm_output_data (1, DWARF_LINE_RANGE,
		       "Line Range Value (Special Opcodes)");
  dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
		       "Special Opcode Base");

  /* Emit the standard_opcode_lengths array: one argument count per
     standard opcode below DWARF_LINE_OPCODE_BASE.  */
  for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
    {
      switch (opc)
	{
	case DW_LNS_advance_pc:
	case DW_LNS_advance_line:
	case DW_LNS_set_file:
	case DW_LNS_set_column:
	case DW_LNS_fixed_advance_pc:
	  n_op_args = 1;
	  break;
	default:
	  n_op_args = 0;
	  break;
	}

      dw2_asm_output_data (1, n_op_args, "opcode: 0x%x has %d args",
			   opc, n_op_args);
    }

  /* Write out the information about the files we use.  */
  output_file_names ();
  ASM_OUTPUT_LABEL (asm_out_file, p2);

  /* We used to set the address register to the first location in the text
     section here, but that didn't accomplish anything since we already
     have a line note for the opening brace of the first function.  */

  /* Generate the line number to PC correspondence table, encoded as
     a series of state machine operations.  */
  current_file = 1;
  current_line = 1;
  strcpy (prev_line_label, text_section_label);
  for (lt_index = 1; lt_index < line_info_table_in_use; ++lt_index)
    {
      dw_line_info_ref line_info = &line_info_table[lt_index];

      /* Emit debug info for the address of the current line.

	 Unfortunately, we have little choice here currently, and must always
	 use the most general form.  GCC does not know the address delta
	 itself, so we can't use DW_LNS_advance_pc.  Many ports do have length
	 attributes which will give an upper bound on the address range.  We
	 could perhaps use length attributes to determine when it is safe to
	 use DW_LNS_fixed_advance_pc.  */
      ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, lt_index);
      /* NOTE: the `if (0)' arm is deliberately disabled code kept as
	 documentation of the cheaper DW_LNS_fixed_advance_pc encoding.  */
      if (0)
	{
	  /* This can handle deltas up to 0xffff.  This takes 3 bytes.  */
	  dw2_asm_output_data (1, DW_LNS_fixed_advance_pc,
			       "DW_LNS_fixed_advance_pc");
	  dw2_asm_output_delta (2, line_label, prev_line_label, NULL);
	}
      else
	{
	  /* This can handle any delta.  This takes
	     4+DWARF2_ADDR_SIZE bytes.  */
	  dw2_asm_output_data (1, 0, "DW_LNE_set_address");
	  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
	}
      strcpy (prev_line_label, line_label);

      /* Emit debug info for the source file of the current line, if
	 different from the previous line.  */
      if (line_info->dw_file_num != current_file)
	{
	  current_file = line_info->dw_file_num;
	  dw2_asm_output_data (1, DW_LNS_set_file, "DW_LNS_set_file");
	  dw2_asm_output_data_uleb128 (current_file, "(\"%s\")",
				       file_table.table[current_file]);
	}

      /* Emit debug info for the current line number, choosing the encoding
	 that uses the least amount of space.  */
      if (line_info->dw_line_num != current_line)
	{
	  line_offset = line_info->dw_line_num - current_line;
	  line_delta = line_offset - DWARF_LINE_BASE;
	  current_line = line_info->dw_line_num;
	  if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
	    /* This can handle deltas from -10 to 234, using the current
	       definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.  This
	       takes 1 byte.  A special opcode implicitly appends a row,
	       so no DW_LNS_copy is needed in this arm.  */
	    dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
				 "line %lu", current_line);
	  else
	    {
	      /* This can handle any delta.  This takes at least 4 bytes,
		 depending on the value being encoded.  */
	      dw2_asm_output_data (1, DW_LNS_advance_line,
				   "advance to line %lu", current_line);
	      dw2_asm_output_data_sleb128 (line_offset, NULL);
	      dw2_asm_output_data (1, DW_LNS_copy, "DW_LNS_copy");
	    }
	}
      else
	/* We still need to start a new row, so output a copy insn.  */
	dw2_asm_output_data (1, DW_LNS_copy, "DW_LNS_copy");
    }

  /* Emit debug info for the address of the end of the function.  */
  if (0)
    {
      dw2_asm_output_data (1, DW_LNS_fixed_advance_pc,
			   "DW_LNS_fixed_advance_pc");
      dw2_asm_output_delta (2, text_end_label, prev_line_label, NULL);
    }
  else
    {
      dw2_asm_output_data (1, 0, "DW_LNE_set_address");
      dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
      dw2_asm_output_data (1, DW_LNE_set_address, NULL);
      dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_end_label, NULL);
    }

  /* Terminate the main text section's sequence.  */
  dw2_asm_output_data (1, 0, "DW_LNE_end_sequence");
  dw2_asm_output_data_uleb128 (1, NULL);
  dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);

  /* Second program: lines for code emitted outside the main text
     section.  The state machine restarts per function sequence.  */
  function = 0;
  current_file = 1;
  current_line = 1;
  for (lt_index = 0; lt_index < separate_line_info_table_in_use;)
    {
      dw_separate_line_info_ref line_info
	= &separate_line_info_table[lt_index];

      /* Emit debug info for the address of the current line.  If this is
	 a new function, or the first line of a function, then we need
	 to handle it differently.  */
      ASM_GENERATE_INTERNAL_LABEL (line_label, SEPARATE_LINE_CODE_LABEL,
				   lt_index);
      if (function != line_info->function)
	{
	  function = line_info->function;

	  /* Set the address register to the first line in the function */
	  dw2_asm_output_data (1, 0, "DW_LNE_set_address");
	  dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	  dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	  dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
	}
      else
	{
	  /* ??? See the DW_LNS_advance_pc comment above.  */
	  if (0)
	    {
	      dw2_asm_output_data (1, DW_LNS_fixed_advance_pc,
				   "DW_LNS_fixed_advance_pc");
	      dw2_asm_output_delta (2, line_label, prev_line_label, NULL);
	    }
	  else
	    {
	      dw2_asm_output_data (1, 0, "DW_LNE_set_address");
	      dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	      dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
	    }
	}
      strcpy (prev_line_label, line_label);

      /* Emit debug info for the source file of the current line, if
	 different from the previous line.  */
      if (line_info->dw_file_num != current_file)
	{
	  current_file = line_info->dw_file_num;
	  dw2_asm_output_data (1, DW_LNS_set_file, "DW_LNS_set_file");
	  dw2_asm_output_data_uleb128 (current_file, "(\"%s\")",
				       file_table.table[current_file]);
	}

      /* Emit debug info for the current line number, choosing the encoding
	 that uses the least amount of space.  */
      if (line_info->dw_line_num != current_line)
	{
	  line_offset = line_info->dw_line_num - current_line;
	  line_delta = line_offset - DWARF_LINE_BASE;
	  current_line = line_info->dw_line_num;
	  if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
	    dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
				 "line %lu", current_line);
	  else
	    {
	      dw2_asm_output_data (1, DW_LNS_advance_line,
				   "advance to line %lu", current_line);
	      dw2_asm_output_data_sleb128 (line_offset, NULL);
	      dw2_asm_output_data (1, DW_LNS_copy, "DW_LNS_copy");
	    }
	}
      else
	dw2_asm_output_data (1, DW_LNS_copy, "DW_LNS_copy");

      lt_index++;

      /* If we're done with a function, end its sequence.  */
      if (lt_index == separate_line_info_table_in_use
	  || separate_line_info_table[lt_index].function != function)
	{
	  /* Reset the state machine defaults for the next sequence.  */
	  current_file = 1;
	  current_line = 1;

	  /* Emit debug info for the address of the end of the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (line_label, FUNC_END_LABEL, function);
	  if (0)
	    {
	      dw2_asm_output_data (1, DW_LNS_fixed_advance_pc,
				   "DW_LNS_fixed_advance_pc");
	      dw2_asm_output_delta (2, line_label, prev_line_label, NULL);
	    }
	  else
	    {
	      dw2_asm_output_data (1, 0, "DW_LNE_set_address");
	      dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
	      dw2_asm_output_data (1, DW_LNE_set_address, NULL);
	      dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
	    }

	  /* Output the marker for the end of this sequence.  */
	  dw2_asm_output_data (1, 0, "DW_LNE_end_sequence");
	  dw2_asm_output_data_uleb128 (1, NULL);
	  dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
	}
    }

  /* Output the marker for the end of the line number info.  */
  ASM_OUTPUT_LABEL (asm_out_file, l2);
}
/* Given a pointer to a tree node for some base type, return a pointer to
   a DIE that describes the given type.

   This routine must only be called for GCC type nodes that correspond to
   Dwarf base (fundamental) types.  Returns 0 for ERROR_MARK and
   VOID_TYPE; aborts on any TREE_CODE that is not a Dwarf base type.  */

static dw_die_ref
base_type_die (type)
     tree type;
{
  dw_die_ref base_type_result;
  const char *type_name;
  enum dwarf_type encoding;	/* DW_AT_encoding value for the DIE.  */
  tree name = TYPE_NAME (type);

  if (TREE_CODE (type) == ERROR_MARK || TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* Prefer the declared name; TYPE_NAME may be either an identifier or
     a TYPE_DECL wrapping one.  */
  if (name)
    {
      if (TREE_CODE (name) == TYPE_DECL)
	name = DECL_NAME (name);

      type_name = IDENTIFIER_POINTER (name);
    }
  else
    type_name = "__unknown__";

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
      /* Carefully distinguish the C character types, without messing
	 up if the language is not C.  Note that we check only for the names
	 that contain spaces; other names might occur by coincidence in other
	 languages, so we only check if main variant is char_type_node.  */
      if (! (TYPE_PRECISION (type) == CHAR_TYPE_SIZE
	     && (type == char_type_node
		 || (TYPE_MAIN_VARIANT (type) == char_type_node
		     && ! strcmp (type_name, "char"))
		 || ! strcmp (type_name, "signed char")
		 || ! strcmp (type_name, "unsigned char"))))
	{
	  if (TREE_UNSIGNED (type))
	    encoding = DW_ATE_unsigned;
	  else
	    encoding = DW_ATE_signed;
	  break;
	}
      /* else fall through.  (A char-like INTEGER_TYPE is encoded the
	 same way as CHAR_TYPE below.)  */

    case CHAR_TYPE:
      /* GNU Pascal/Ada CHAR type.  Not used in C.  */
      if (TREE_UNSIGNED (type))
	encoding = DW_ATE_unsigned_char;
      else
	encoding = DW_ATE_signed_char;
      break;

    case REAL_TYPE:
      encoding = DW_ATE_float;
      break;

      /* Dwarf2 doesn't know anything about complex ints, so use
	 a user defined type for it.  */
    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
	encoding = DW_ATE_complex_float;
      else
	encoding = DW_ATE_lo_user;
      break;

    case BOOLEAN_TYPE:
      /* GNU FORTRAN/Ada/C++ BOOLEAN type.  */
      encoding = DW_ATE_boolean;
      break;

    default:
      /* No other TREE_CODEs are Dwarf fundamental types.  */
      abort ();
    }

  /* Build the DIE: name (possibly demangled), byte size and encoding.  */
  base_type_result = new_die (DW_TAG_base_type, comp_unit_die, type);
  if (demangle_name_func)
    type_name = (*demangle_name_func) (type_name);

  add_AT_string (base_type_result, DW_AT_name, type_name);
  add_AT_unsigned (base_type_result, DW_AT_byte_size,
		   int_size_in_bytes (type));
  add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);

  return base_type_result;
}
/* Given a pointer to an arbitrary ..._TYPE tree node, return a pointer to
   the Dwarf "root" type for the given input type.  The Dwarf "root" type of
   a given type is generally the same as the given type, except that if the
   given type is a pointer or reference type, then the root type of the given
   type is the root type of the "basis" type for the pointer or reference
   type.  (This definition of the "root" type is recursive.)  Also, the root
   type of a `const' qualified type or a `volatile' qualified type is the
   root type of the given type without the qualifiers.  */

static tree
root_type (type)
     tree type;
{
  /* An erroneous type has no meaningful root; propagate the error node.
     (This guard also makes an ERROR_MARK case in the switch below
     unreachable, so none is present.)  */
  if (TREE_CODE (type) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Recursively strip pointer/reference layers and take the main
	 variant of whatever they ultimately refer to.  */
      return type_main_variant (root_type (TREE_TYPE (type)));

    default:
      /* Strip type qualifiers by returning the main variant.  */
      return type_main_variant (type);
    }
}
/* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
   given input type is a Dwarf "fundamental" type.  Otherwise return null.  */

static inline int
is_base_type (type)
     tree type;
{
  int result;

  switch (TREE_CODE (type))
    {
      /* These codes map directly onto Dwarf base types.  */
    case ERROR_MARK:
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      result = 1;
      break;

      /* Aggregates, functions, pointers and other composite types are
	 described by more elaborate DIEs, not base-type DIEs.  */
    case SET_TYPE:
    case ARRAY_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ENUMERAL_TYPE:
    case FUNCTION_TYPE:
    case METHOD_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case FILE_TYPE:
    case OFFSET_TYPE:
    case LANG_TYPE:
    case VECTOR_TYPE:
      result = 0;
      break;

    default:
      /* Any other code is a bug in the caller.  */
      abort ();
      result = 0;
      break;
    }

  return result;
}
/* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
   entry that chains various modifiers in front of the given type.

   IS_CONST_TYPE / IS_VOLATILE_TYPE request DW_TAG_const_type /
   DW_TAG_volatile_type wrappers; the modifiers are peeled one at a time
   by recursive calls, each recursion dropping one qualifier.  The
   returned DIE is equated to the qualified type so later lookups reuse
   it.  */

static dw_die_ref
modified_type_die (type, is_const_type, is_volatile_type, context_die)
     tree type;
     int is_const_type;
     int is_volatile_type;
     dw_die_ref context_die;
{
  enum tree_code code = TREE_CODE (type);
  dw_die_ref mod_type_die = NULL;
  dw_die_ref sub_die = NULL;	/* DIE referenced via DW_AT_type, if any.  */
  tree item_type = NULL;	/* Pointed-to type for pointers/references.  */

  if (code != ERROR_MARK)
    {
      tree qualified_type;

      /* See if we already have the appropriately qualified variant of
	 this type.  */
      qualified_type
	= get_qualified_type (type,
			      ((is_const_type ? TYPE_QUAL_CONST : 0)
			       | (is_volatile_type
				  ? TYPE_QUAL_VOLATILE : 0)));

      /* If we do, then we can just use its DIE, if it exists.  */
      if (qualified_type)
	{
	  mod_type_die = lookup_type_die (qualified_type);
	  if (mod_type_die)
	    return mod_type_die;
	}

      /* Handle C typedef types.  */
      if (qualified_type && TYPE_NAME (qualified_type)
	  && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL
	  && DECL_ORIGINAL_TYPE (TYPE_NAME (qualified_type)))
	{
	  tree type_name = TYPE_NAME (qualified_type);
	  tree dtype = TREE_TYPE (type_name);

	  if (qualified_type == dtype)
	    {
	      /* For a named type, use the typedef.  */
	      gen_type_die (qualified_type, context_die);
	      mod_type_die = lookup_type_die (qualified_type);
	    }
	  else if (is_const_type < TYPE_READONLY (dtype)
		   || is_volatile_type < TYPE_VOLATILE (dtype))
	    /* cv-unqualified version of named type.  Just use the unnamed
	       type to which it refers.  */
	    mod_type_die
	      = modified_type_die (DECL_ORIGINAL_TYPE (type_name),
				   is_const_type, is_volatile_type,
				   context_die);

	  /* Else cv-qualified version of named type; fall through.  */
	}

      if (mod_type_die)
	/* OK.  */
	;
      else if (is_const_type)
	{
	  /* Peel `const' first; the recursion handles any remaining
	     `volatile' qualifier.  */
	  mod_type_die = new_die (DW_TAG_const_type, comp_unit_die, type);
	  sub_die = modified_type_die (type, 0, is_volatile_type, context_die);
	}
      else if (is_volatile_type)
	{
	  mod_type_die = new_die (DW_TAG_volatile_type, comp_unit_die, type);
	  sub_die = modified_type_die (type, 0, 0, context_die);
	}
      else if (code == POINTER_TYPE)
	{
	  mod_type_die = new_die (DW_TAG_pointer_type, comp_unit_die, type);
	  add_AT_unsigned (mod_type_die, DW_AT_byte_size, PTR_SIZE);
	  /* Defer describing the pointed-to type until after the equate
	     below, to terminate recursive-type cycles.  */
	  item_type = TREE_TYPE (type);
	}
      else if (code == REFERENCE_TYPE)
	{
	  mod_type_die = new_die (DW_TAG_reference_type, comp_unit_die, type);
	  add_AT_unsigned (mod_type_die, DW_AT_byte_size, PTR_SIZE);
	  item_type = TREE_TYPE (type);
	}
      else if (is_base_type (type))
	mod_type_die = base_type_die (type);
      else
	{
	  gen_type_die (type, context_die);

	  /* We have to get the type_main_variant here (and pass that to the
	     `lookup_type_die' routine) because the ..._TYPE node we have
	     might simply be a *copy* of some original type node (where the
	     copy was created to help us keep track of typedef names) and
	     that copy might have a different TYPE_UID from the original
	     ..._TYPE node.  */
	  if (TREE_CODE (type) != VECTOR_TYPE)
	    mod_type_die = lookup_type_die (type_main_variant (type));
	  else
	    /* Vectors have the debugging information in the type,
	       not the main variant.  */
	    mod_type_die = lookup_type_die (type);
	  if (mod_type_die == NULL)
	    abort ();
	}

      /* We want to equate the qualified type to the die below.  */
      type = qualified_type;
    }

  if (type)
    equate_type_number_to_die (type, mod_type_die);
  if (item_type)
    /* We must do this after the equate_type_number_to_die call, in case
       this is a recursive type.  This ensures that the modified_type_die
       recursion will terminate even if the type is recursive.  Recursive
       types are possible in Ada.  */
    sub_die = modified_type_die (item_type,
				 TYPE_READONLY (item_type),
				 TYPE_VOLATILE (item_type),
				 context_die);

  if (sub_die != NULL)
    add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);

  return mod_type_die;
}
/* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
   an enumerated type.  */

static inline int
type_is_enum (type)
     tree type;
{
  /* Enumerations are exactly the ENUMERAL_TYPE tree nodes.  */
  if (TREE_CODE (type) == ENUMERAL_TYPE)
    return 1;

  return 0;
}
/* Return the register number described by a given RTL node.  */

static unsigned int
reg_number (rtl)
     rtx rtl;
{
  unsigned int hard_regno = REGNO (rtl);

  /* Only hard registers can be translated to a debugger register
     number; a pseudo reaching here is a bug in the caller.  */
  if (hard_regno >= FIRST_PSEUDO_REGISTER)
    abort ();

  return DBX_REGISTER_NUMBER (hard_regno);
}
/* Return a location descriptor that designates a machine register or
   zero if there is no such.  */

static dw_loc_descr_ref
reg_loc_descriptor (rtl)
     rtx rtl;
{
  unsigned dwarf_regno;

  /* Pseudo registers have no DWARF representation.  */
  if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
    return 0;

  dwarf_regno = reg_number (rtl);

  /* Registers 0..31 have dedicated one-byte opcodes; larger numbers
     need the generic DW_OP_regx form with an operand.  */
  return (dwarf_regno <= 31
	  ? new_loc_descr (DW_OP_reg0 + dwarf_regno, 0, 0)
	  : new_loc_descr (DW_OP_regx, dwarf_regno, 0));
}
/* Return a location descriptor that designates a constant.  */

static dw_loc_descr_ref
int_loc_descriptor (i)
     HOST_WIDE_INT i;
{
  enum dwarf_location_atom op;

  /* Pick the smallest representation of a constant, rather than just
     defaulting to the LEB encoding: negative values get the signed
     fixed-size forms, small nonnegative ones the literal opcodes.  */
  if (i < 0)
    {
      if (i >= -0x80)
	op = DW_OP_const1s;
      else if (i >= -0x8000)
	op = DW_OP_const2s;
      else if (HOST_BITS_PER_WIDE_INT == 32
	       || i >= -0x80000000)
	op = DW_OP_const4s;
      else
	op = DW_OP_consts;
    }
  else if (i <= 31)
    op = DW_OP_lit0 + i;
  else if (i <= 0xff)
    op = DW_OP_const1u;
  else if (i <= 0xffff)
    op = DW_OP_const2u;
  else if (HOST_BITS_PER_WIDE_INT == 32
	   || i <= 0xffffffff)
    op = DW_OP_const4u;
  else
    op = DW_OP_constu;

  return new_loc_descr (op, i, 0);
}
/* Return a location descriptor that designates a base+offset location.  */

static dw_loc_descr_ref
based_loc_descr (reg, offset)
     unsigned reg;
     long int offset;
{
  /* For the "frame base", we use the frame pointer or stack pointer
     registers, since the RTL for local variables is relative to one of
     them.  */
  unsigned frame_reg = DBX_REGISTER_NUMBER (frame_pointer_needed
					    ? HARD_FRAME_POINTER_REGNUM
					    : STACK_POINTER_REGNUM);

  /* The frame base gets its own opcode ...  */
  if (reg == frame_reg)
    return new_loc_descr (DW_OP_fbreg, offset, 0);

  /* ... low-numbered registers have dedicated breg opcodes ...  */
  if (reg <= 31)
    return new_loc_descr (DW_OP_breg0 + reg, offset, 0);

  /* ... and anything else needs the generic two-operand form.  */
  return new_loc_descr (DW_OP_bregx, reg, offset);
}
/* Return true if this RTL expression describes a base+offset calculation.  */

static inline int
is_based_loc (rtl)
     rtx rtl;
{
  /* We need a PLUS of a hard register and an integer constant.  */
  if (GET_CODE (rtl) != PLUS)
    return 0;
  if (GET_CODE (XEXP (rtl, 0)) != REG)
    return 0;
  if (REGNO (XEXP (rtl, 0)) >= FIRST_PSEUDO_REGISTER)
    return 0;

  return GET_CODE (XEXP (rtl, 1)) == CONST_INT;
}
/* The following routine converts the RTL for a variable or parameter
   (resident in memory) into an equivalent Dwarf representation of a
   mechanism for getting the address of that same variable onto the top of a
   hypothetical "address evaluation" stack.

   When creating memory location descriptors, we are effectively transforming
   the RTL for a memory-resident object into its Dwarf postfix expression
   equivalent.  This routine recursively descends an RTL tree, turning
   it into Dwarf postfix code as it goes.

   MODE is the mode of the memory reference, needed to handle some
   autoincrement addressing modes.

   Return 0 if we can't represent the location.  */

static dw_loc_descr_ref
mem_loc_descriptor (rtl, mode)
     rtx rtl;
     enum machine_mode mode;
{
  dw_loc_descr_ref mem_loc_result = NULL;

  /* Note that for a dynamically sized array, the location we will generate a
     description of here will be the lowest numbered location which is
     actually within the array.  That's *not* necessarily the same as the
     zeroth element of the array.  */

#ifdef ASM_SIMPLIFY_DWARF_ADDR
  /* Let the target canonicalize the address first, if it wants to.  */
  rtl = ASM_SIMPLIFY_DWARF_ADDR (rtl);
#endif

  switch (GET_CODE (rtl))
    {
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      /* POST_INC and POST_DEC can be handled just like a SUBREG.  So we
	 just fall into the SUBREG code.  (The pre-modification value is
	 the operand itself.)  */

      /* ... fall through ...  */

    case SUBREG:
      /* The case of a subreg may arise when we have a local (register)
	 variable or a formal (register) parameter which doesn't quite fill
	 up an entire register.  For now, just assume that it is
	 legitimate to make the Dwarf info refer to the whole register which
	 contains the given subreg.  */
      rtl = SUBREG_REG (rtl);

      /* ... fall through ...  */

    case REG:
      /* Whenever a register number forms a part of the description of the
	 method for calculating the (dynamic) address of a memory resident
	 object, DWARF rules require the register number be referred to as
	 a "base register".  This distinction is not based in any way upon
	 what category of register the hardware believes the given register
	 belongs to.  This is strictly DWARF terminology we're dealing with
	 here.  Note that in cases where the location of a memory-resident
	 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
	 OP_CONST (0)) the actual DWARF location descriptor that we generate
	 may just be OP_BASEREG (basereg).  This may look deceptively like
	 the object in question was allocated to a register (rather than in
	 memory) so DWARF consumers need to be aware of the subtle
	 distinction between OP_REG and OP_BASEREG.  */
      if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
	mem_loc_result = based_loc_descr (reg_number (rtl), 0);
      break;

    case MEM:
      /* Describe the address, then dereference it to get the value.  */
      mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (rtl));
      if (mem_loc_result != 0)
	add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
      break;

    case LO_SUM:
      /* The high part carries no extra information; describe the
	 low-part symbol.  */
      rtl = XEXP (rtl, 1);

      /* ... fall through ...  */

    case LABEL_REF:
      /* Some ports can transform a symbol ref into a label ref, because
	 the symbol ref is too far away and has to be dumped into a constant
	 pool.  */
    case CONST:
    case SYMBOL_REF:
      /* Alternatively, the symbol in the constant pool might be referenced
	 by a different symbol.  */
      if (GET_CODE (rtl) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (rtl))
	{
	  bool marked;
	  rtx tmp = get_pool_constant_mark (rtl, &marked);

	  if (GET_CODE (tmp) == SYMBOL_REF)
	    {
	      rtl = tmp;
	      if (CONSTANT_POOL_ADDRESS_P (tmp))
		get_pool_constant_mark (tmp, &marked);
	      else
		marked = true;
	    }

	  /* If all references to this pool constant were optimized away,
	     it was not output and thus we can't represent it.
	     FIXME: might try to use DW_OP_const_value here, though
	     DW_OP_piece complicates it.  */
	  if (!marked)
	    return 0;
	}

      /* Emit the symbol's address as a literal operand and remember the
	 rtx so the symbol is kept alive for output.  */
      mem_loc_result = new_loc_descr (DW_OP_addr, 0, 0);
      mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_addr;
      mem_loc_result->dw_loc_oprnd1.v.val_addr = rtl;
      VARRAY_PUSH_RTX (used_rtx_varray, rtl);
      break;

    case PRE_MODIFY:
      /* Extract the PLUS expression nested inside and fall into
	 PLUS code below.  */
      rtl = XEXP (rtl, 1);
      goto plus;

    case PRE_INC:
    case PRE_DEC:
      /* Turn these into a PLUS expression and fall into the PLUS code
	 below.  */
      rtl = gen_rtx_PLUS (word_mode, XEXP (rtl, 0),
			  GEN_INT (GET_CODE (rtl) == PRE_INC
				   ? GET_MODE_UNIT_SIZE (mode)
				   : -GET_MODE_UNIT_SIZE (mode)));

      /* ... fall through ...  */

    case PLUS:
    plus:
      if (is_based_loc (rtl))
	/* reg+const fits the compact base-register form.  */
	mem_loc_result = based_loc_descr (reg_number (XEXP (rtl, 0)),
					  INTVAL (XEXP (rtl, 1)));
      else
	{
	  mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode);
	  if (mem_loc_result == 0)
	    break;

	  if (GET_CODE (XEXP (rtl, 1)) == CONST_INT
	      && INTVAL (XEXP (rtl, 1)) >= 0)
	    /* Nonnegative constant addend: one plus_uconst opcode.  */
	    add_loc_descr (&mem_loc_result,
			   new_loc_descr (DW_OP_plus_uconst,
					  INTVAL (XEXP (rtl, 1)), 0));
	  else
	    {
	      /* General case: push both operands and add.  */
	      add_loc_descr (&mem_loc_result,
			     mem_loc_descriptor (XEXP (rtl, 1), mode));
	      add_loc_descr (&mem_loc_result,
			     new_loc_descr (DW_OP_plus, 0, 0));
	    }
	}
      break;

    case MULT:
      {
	/* If a pseudo-reg is optimized away, it is possible for it to
	   be replaced with a MEM containing a multiply.  */
	dw_loc_descr_ref op0 = mem_loc_descriptor (XEXP (rtl, 0), mode);
	dw_loc_descr_ref op1 = mem_loc_descriptor (XEXP (rtl, 1), mode);

	if (op0 == 0 || op1 == 0)
	  break;

	mem_loc_result = op0;
	add_loc_descr (&mem_loc_result, op1);
	add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
	break;
      }

    case CONST_INT:
      mem_loc_result = int_loc_descriptor (INTVAL (rtl));
      break;

    case ADDRESSOF:
      /* If this is a MEM, return its address.  Otherwise, we can't
	 represent this.  */
      if (GET_CODE (XEXP (rtl, 0)) == MEM)
	return mem_loc_descriptor (XEXP (XEXP (rtl, 0), 0), mode);
      else
	return 0;

    default:
      abort ();
    }

  return mem_loc_result;
}
/* Return a descriptor that describes the concatenation of two locations.
   This is typically a complex variable.  */

static dw_loc_descr_ref
concat_loc_descriptor (x0, x1)
     rtx x0, x1;
{
  dw_loc_descr_ref piece0 = loc_descriptor (x0);
  dw_loc_descr_ref piece1 = loc_descriptor (x1);
  dw_loc_descr_ref result;

  /* If either half is unrepresentable, the concatenation is too.  */
  if (piece0 == 0 || piece1 == 0)
    return 0;

  /* Emit each half followed by a DW_OP_piece giving its size, so the
     consumer knows how the two locations fit together.  */
  result = piece0;
  add_loc_descr (&result,
		 new_loc_descr (DW_OP_piece,
				GET_MODE_SIZE (GET_MODE (x0)), 0));
  add_loc_descr (&result, piece1);
  add_loc_descr (&result,
		 new_loc_descr (DW_OP_piece,
				GET_MODE_SIZE (GET_MODE (x1)), 0));

  return result;
}
/* Output a proper Dwarf location descriptor for a variable or parameter
   which is either allocated in a register or in a memory location.  For a
   register, we just generate an OP_REG and the register number.  For a
   memory location we provide a Dwarf postfix expression describing how to
   generate the (dynamic) address of the object onto the address stack.

   If we don't know how to describe it, return 0.  */

static dw_loc_descr_ref
loc_descriptor (rtl)
     rtx rtl;
{
  switch (GET_CODE (rtl))
    {
    case SUBREG:
      /* The case of a subreg may arise when we have a local (register)
	 variable or a formal (register) parameter which doesn't quite fill
	 up an entire register.  For now, just assume that it is
	 legitimate to make the Dwarf info refer to the whole register which
	 contains the given subreg; note the inner rtx is handed straight
	 to the register path without re-dispatching on its code.  */
      return reg_loc_descriptor (SUBREG_REG (rtl));

    case REG:
      return reg_loc_descriptor (rtl);

    case MEM:
      return mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (rtl));

    case CONCAT:
      return concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1));

    default:
      abort ();
    }

  /* Not reached.  */
  return NULL;
}
/* Similar, but generate the descriptor from trees instead of rtl.  This comes
   up particularly with variable length arrays.  If ADDRESSP is nonzero, we are
   looking for an address.  Otherwise, we return a value.  If we can't make a
   descriptor, return 0.

   Internally, INDIRECT_P tracks whether the expression built so far
   computes the object's address (1) or its value (0); the epilogue
   reconciles that with ADDRESSP, adding a deref when we have an address
   but the caller wants a value.  */

static dw_loc_descr_ref
loc_descriptor_from_tree (loc, addressp)
     tree loc;
     int addressp;
{
  dw_loc_descr_ref ret, ret1;
  int indirect_p = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (loc));
  enum dwarf_location_atom op;

  /* ??? Most of the time we do not take proper care for sign/zero
     extending the values properly.  Hopefully this won't be a real
     problem...  */

  switch (TREE_CODE (loc))
    {
    case ERROR_MARK:
      return 0;

    case WITH_RECORD_EXPR:
    case PLACEHOLDER_EXPR:
      /* This case involves extracting fields from an object to determine the
	 position of other fields.  We don't try to encode this here.  The
	 only user of this is Ada, which encodes the needed information using
	 the names of types.  */
      return 0;

    case CALL_EXPR:
      /* A call's value cannot be described as a location.  */
      return 0;

    case ADDR_EXPR:
      /* We can support this only if we can look through conversions and
	 find an INDIRECT_EXPR.  */
      for (loc = TREE_OPERAND (loc, 0);
	   TREE_CODE (loc) == CONVERT_EXPR || TREE_CODE (loc) == NOP_EXPR
	   || TREE_CODE (loc) == NON_LVALUE_EXPR
	   || TREE_CODE (loc) == VIEW_CONVERT_EXPR
	   || TREE_CODE (loc) == SAVE_EXPR;
	   loc = TREE_OPERAND (loc, 0))
	;

      return (TREE_CODE (loc) == INDIRECT_REF
	      ? loc_descriptor_from_tree (TREE_OPERAND (loc, 0), addressp)
	      : 0);

    case VAR_DECL:
      if (DECL_THREAD_LOCAL (loc))
	{
	  rtx rtl;

#ifndef ASM_OUTPUT_DWARF_DTPREL
	  /* If this is not defined, we have no way to emit the data.  */
	  return 0;
#endif

	  /* The way DW_OP_GNU_push_tls_address is specified, we can only
	     look up addresses of objects in the current module.  */
	  if (DECL_EXTERNAL (loc))
	    return 0;

	  rtl = rtl_for_decl_location (loc);
	  if (rtl == NULL_RTX)
	    return 0;

	  if (GET_CODE (rtl) != MEM)
	    return 0;
	  rtl = XEXP (rtl, 0);
	  if (! CONSTANT_P (rtl))
	    return 0;

	  /* Emit the module-relative TLS offset followed by the GNU TLS
	     address-resolution opcode.  */
	  ret = new_loc_descr (INTERNAL_DW_OP_tls_addr, 0, 0);
	  ret->dw_loc_oprnd1.val_class = dw_val_class_addr;
	  ret->dw_loc_oprnd1.v.val_addr = rtl;

	  ret1 = new_loc_descr (DW_OP_GNU_push_tls_address, 0, 0);
	  add_loc_descr (&ret, ret1);

	  indirect_p = 1;
	  break;
	}
      /* FALLTHRU */

    case PARM_DECL:
      {
	rtx rtl = rtl_for_decl_location (loc);

	if (rtl == NULL_RTX)
	  return 0;
	else if (CONSTANT_P (rtl))
	  {
	    ret = new_loc_descr (DW_OP_addr, 0, 0);
	    ret->dw_loc_oprnd1.val_class = dw_val_class_addr;
	    ret->dw_loc_oprnd1.v.val_addr = rtl;
	    indirect_p = 1;
	  }
	else
	  {
	    enum machine_mode mode = GET_MODE (rtl);

	    /* A MEM's operand is the address, so the result of
	       describing it is an address, not a value.  */
	    if (GET_CODE (rtl) == MEM)
	      {
		indirect_p = 1;
		rtl = XEXP (rtl, 0);
	      }

	    ret = mem_loc_descriptor (rtl, mode);
	  }
      }
      break;

    case INDIRECT_REF:
      ret = loc_descriptor_from_tree (TREE_OPERAND (loc, 0), 0);
      indirect_p = 1;
      break;

    case COMPOUND_EXPR:
      /* Only the second operand's value matters.  */
      return loc_descriptor_from_tree (TREE_OPERAND (loc, 1), addressp);

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
      /* Conversions don't change the location; look through them.  */
      return loc_descriptor_from_tree (TREE_OPERAND (loc, 0), addressp);

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
	tree obj, offset;
	HOST_WIDE_INT bitsize, bitpos, bytepos;
	enum machine_mode mode;
	int volatilep;

	/* Decompose the reference into the containing object plus
	   bit offset/size information.  */
	obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
				   &unsignedp, &volatilep);

	if (obj == loc)
	  return 0;

	ret = loc_descriptor_from_tree (obj, 1);
	if (ret == 0
	    || bitpos % BITS_PER_UNIT != 0 || bitsize % BITS_PER_UNIT != 0)
	  /* Can't describe sub-byte positions or sizes.  */
	  return 0;

	if (offset != NULL_TREE)
	  {
	    /* Variable offset.  */
	    add_loc_descr (&ret, loc_descriptor_from_tree (offset, 0));
	    add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
	  }

	if (!addressp)
	  indirect_p = 1;

	bytepos = bitpos / BITS_PER_UNIT;
	if (bytepos > 0)
	  add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, bytepos, 0));
	else if (bytepos < 0)
	  {
	    /* plus_uconst is unsigned; a negative byte offset needs an
	       explicit constant and a signed add.  */
	    add_loc_descr (&ret, int_loc_descriptor (bytepos));
	    add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
	  }
	break;
      }

    case INTEGER_CST:
      if (host_integerp (loc, 0))
	ret = int_loc_descriptor (tree_low_cst (loc, 0));
      else
	return 0;
      break;

    /* Binary operations map onto stack-machine opcodes via do_binop.  */
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case BIT_AND_EXPR:
      op = DW_OP_and;
      goto do_binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      op = DW_OP_xor;
      goto do_binop;

    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case BIT_IOR_EXPR:
      op = DW_OP_or;
      goto do_binop;

    case TRUNC_DIV_EXPR:
      op = DW_OP_div;
      goto do_binop;

    case MINUS_EXPR:
      op = DW_OP_minus;
      goto do_binop;

    case TRUNC_MOD_EXPR:
      op = DW_OP_mod;
      goto do_binop;

    case MULT_EXPR:
      op = DW_OP_mul;
      goto do_binop;

    case LSHIFT_EXPR:
      op = DW_OP_shl;
      goto do_binop;

    case RSHIFT_EXPR:
      /* Arithmetic vs. logical shift depends on the signedness.  */
      op = (unsignedp ? DW_OP_shr : DW_OP_shra);
      goto do_binop;

    case PLUS_EXPR:
      /* Prefer the compact plus_uconst form for small constant addends.  */
      if (TREE_CODE (TREE_OPERAND (loc, 1)) == INTEGER_CST
	  && host_integerp (TREE_OPERAND (loc, 1), 0))
	{
	  ret = loc_descriptor_from_tree (TREE_OPERAND (loc, 0), 0);
	  if (ret == 0)
	    return 0;

	  add_loc_descr (&ret,
			 new_loc_descr (DW_OP_plus_uconst,
					tree_low_cst (TREE_OPERAND (loc, 1),
						      0),
					0));
	  break;
	}

      op = DW_OP_plus;
      goto do_binop;

    /* The DWARF comparison opcodes are signed; punt on unsigned
       operand types for the ordering comparisons.  */
    case LE_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
	return 0;

      op = DW_OP_le;
      goto do_binop;

    case GE_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
	return 0;

      op = DW_OP_ge;
      goto do_binop;

    case LT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
	return 0;

      op = DW_OP_lt;
      goto do_binop;

    case GT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
	return 0;

      op = DW_OP_gt;
      goto do_binop;

    case EQ_EXPR:
      op = DW_OP_eq;
      goto do_binop;

    case NE_EXPR:
      op = DW_OP_ne;
      goto do_binop;

    do_binop:
      /* Push both operand values, then apply OP.  */
      ret = loc_descriptor_from_tree (TREE_OPERAND (loc, 0), 0);
      ret1 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0);
      if (ret == 0 || ret1 == 0)
	return 0;

      add_loc_descr (&ret, ret1);
      add_loc_descr (&ret, new_loc_descr (op, 0, 0));
      break;

    case TRUTH_NOT_EXPR:
    case BIT_NOT_EXPR:
      op = DW_OP_not;
      goto do_unop;

    case ABS_EXPR:
      op = DW_OP_abs;
      goto do_unop;

    case NEGATE_EXPR:
      op = DW_OP_neg;
      goto do_unop;

    do_unop:
      ret = loc_descriptor_from_tree (TREE_OPERAND (loc, 0), 0);
      if (ret == 0)
	return 0;

      add_loc_descr (&ret, new_loc_descr (op, 0, 0));
      break;

    case MAX_EXPR:
      /* Rewrite max(a,b) as (a < b ? b : a) and reuse the COND_EXPR
	 lowering below.  */
      loc = build (COND_EXPR, TREE_TYPE (loc),
		   build (LT_EXPR, integer_type_node,
			  TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
		   TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));

      /* ... fall through ...  */

    case COND_EXPR:
      {
	dw_loc_descr_ref lhs
	  = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0);
	dw_loc_descr_ref rhs
	  = loc_descriptor_from_tree (TREE_OPERAND (loc, 2), 0);
	dw_loc_descr_ref bra_node, jump_node, tmp;

	ret = loc_descriptor_from_tree (TREE_OPERAND (loc, 0), 0);
	if (ret == 0 || lhs == 0 || rhs == 0)
	  return 0;

	/* Lower to: <cond> bra L1; <rhs>; skip L2; L1: <lhs>; L2: nop.  */
	bra_node = new_loc_descr (DW_OP_bra, 0, 0);
	add_loc_descr (&ret, bra_node);

	add_loc_descr (&ret, rhs);
	jump_node = new_loc_descr (DW_OP_skip, 0, 0);
	add_loc_descr (&ret, jump_node);

	add_loc_descr (&ret, lhs);
	bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
	bra_node->dw_loc_oprnd1.v.val_loc = lhs;

	/* ??? Need a node to point the skip at.  Use a nop.  */
	tmp = new_loc_descr (DW_OP_nop, 0, 0);
	add_loc_descr (&ret, tmp);
	jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
	jump_node->dw_loc_oprnd1.v.val_loc = tmp;
      }
      break;

    default:
      abort ();
    }

  /* Show if we can't fill the request for an address.  */
  if (addressp && indirect_p == 0)
    return 0;

  /* If we've got an address and don't want one, dereference.  */
  if (!addressp && indirect_p > 0)
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));

      if (size > DWARF2_ADDR_SIZE || size == -1)
	return 0;
      else if (size == DWARF2_ADDR_SIZE)
	op = DW_OP_deref;
      else
	op = DW_OP_deref_size;

      add_loc_descr (&ret, new_loc_descr (op, size, 0));
    }

  return ret;
}
/* Given a value, round it up to the lowest multiple of `boundary'
   which is not less than the value itself.  */

static inline HOST_WIDE_INT
ceiling (value, boundary)
     HOST_WIDE_INT value;
     unsigned int boundary;
{
  /* Divide rounding up, then scale back to the boundary multiple.  */
  HOST_WIDE_INT units = (value + boundary - 1) / boundary;

  return units * boundary;
}
/* Given a pointer to what is assumed to be a FIELD_DECL node, return a
   pointer to the declared type for the relevant field variable, or return
   `integer_type_node' if the given node turns out to be an
   ERROR_MARK node.  */

static inline tree
field_type (decl)
     tree decl;
{
  tree type;

  if (TREE_CODE (decl) == ERROR_MARK)
    return integer_type_node;

  /* A bit-field records its declared type separately; fall back to the
     node's own type when no bit-field type is recorded.  */
  type = DECL_BIT_FIELD_TYPE (decl);
  return type != NULL_TREE ? type : TREE_TYPE (decl);
}
/* Given a pointer to a tree node, return the alignment in bits for
   it, or else return BITS_PER_WORD if the node actually turns out to
   be an ERROR_MARK node.  */

static inline unsigned
simple_type_align_in_bits (type)
     tree type;
{
  if (TREE_CODE (type) == ERROR_MARK)
    return BITS_PER_WORD;

  return TYPE_ALIGN (type);
}
/* Return the alignment in bits of DECL, or BITS_PER_WORD when DECL is
   an ERROR_MARK node.  */

static inline unsigned
simple_decl_align_in_bits (decl)
     tree decl;
{
  if (TREE_CODE (decl) == ERROR_MARK)
    return BITS_PER_WORD;

  return DECL_ALIGN (decl);
}
/* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
   node, return the size in bits for the type if it is a constant, or else
   return the alignment for the type if the type's size is not constant, or
   else return BITS_PER_WORD if the type actually turns out to be an
   ERROR_MARK node.  */

static inline unsigned HOST_WIDE_INT
simple_type_size_in_bits (type)
     tree type;
{
  tree size;

  if (TREE_CODE (type) == ERROR_MARK)
    return BITS_PER_WORD;

  size = TYPE_SIZE (type);
  if (size == NULL_TREE)
    return 0;
  if (host_integerp (size, 1))
    return tree_low_cst (size, 1);

  /* Variable-sized type: fall back to the alignment.  */
  return TYPE_ALIGN (type);
}
/* Given a pointer to a FIELD_DECL, compute and return the byte offset of the
   lowest addressed byte of the "containing object" for the given FIELD_DECL,
   or return 0 if we are unable to determine what that offset is, either
   because the argument turns out to be a pointer to an ERROR_MARK node, or
   because the offset is actually variable.  (We can't handle the latter case
   just yet).  */

static HOST_WIDE_INT
field_byte_offset (decl)
     tree decl;
{
  unsigned int type_align_in_bits;
  unsigned int decl_align_in_bits;
  unsigned HOST_WIDE_INT type_size_in_bits;
  HOST_WIDE_INT object_offset_in_bits;
  tree type;
  tree field_size_tree;
  HOST_WIDE_INT bitpos_int;
  HOST_WIDE_INT deepest_bitpos;
  unsigned HOST_WIDE_INT field_size_in_bits;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;
  else if (TREE_CODE (decl) != FIELD_DECL)
    abort ();

  type = field_type (decl);
  field_size_tree = DECL_SIZE (decl);

  /* The size could be unspecified if there was an error, or for
     a flexible array member.  */
  if (! field_size_tree)
    field_size_tree = bitsize_zero_node;

  /* We cannot yet cope with fields whose positions are variable, so
     for now, when we see such things, we simply return 0.  Someday, we may
     be able to handle such cases, but it will be damn difficult.  */
  if (! host_integerp (bit_position (decl), 0))
    return 0;

  bitpos_int = int_bit_position (decl);

  /* If we don't know the size of the field, pretend it's a full word.  */
  if (host_integerp (field_size_tree, 1))
    field_size_in_bits = tree_low_cst (field_size_tree, 1);
  else
    field_size_in_bits = BITS_PER_WORD;

  type_size_in_bits = simple_type_size_in_bits (type);
  type_align_in_bits = simple_type_align_in_bits (type);
  decl_align_in_bits = simple_decl_align_in_bits (decl);

  /* The GCC front-end doesn't make any attempt to keep track of the starting
     bit offset (relative to the start of the containing structure type) of the
     hypothetical "containing object" for a bit-field.  Thus, when computing
     the byte offset value for the start of the "containing object" of a
     bit-field, we must deduce this information on our own.  This can be rather
     tricky to do in some cases.  For example, handling the following structure
     type definition when compiling for an i386/i486 target (which only aligns
     long long's to 32-bit boundaries) can be very tricky:

	 struct S { int field1; long long field2:31; };

     Fortunately, there is a simple rule-of-thumb which can be used in such
     cases.  When compiling for an i386/i486, GCC will allocate 8 bytes for the
     structure shown above.  It decides to do this based upon one simple rule
     for bit-field allocation.  GCC allocates each "containing object" for each
     bit-field at the first (i.e. lowest addressed) legitimate alignment
     boundary (based upon the required minimum alignment for the declared type
     of the field) which it can possibly use, subject to the condition that
     there is still enough available space remaining in the containing object
     (when allocated at the selected point) to fully accommodate all of the
     bits of the bit-field itself.

     This simple rule makes it obvious why GCC allocates 8 bytes for each
     object of the structure type shown above.  When looking for a place to
     allocate the "containing object" for `field2', the compiler simply tries
     to allocate a 64-bit "containing object" at each successive 32-bit
     boundary (starting at zero) until it finds a place to allocate that 64-
     bit field such that at least 31 contiguous (and previously unallocated)
     bits remain within that selected 64 bit field.  (As it turns out, for the
     example above, the compiler finds it is OK to allocate the "containing
     object" 64-bit field at bit-offset zero within the structure type.)

     Here we attempt to work backwards from the limited set of facts we're
     given, and we try to deduce from those facts, where GCC must have believed
     that the containing object started (within the structure type).  The value
     we deduce is then used (by the callers of this routine) to generate
     DW_AT_location and DW_AT_bit_offset attributes for fields (both bit-fields
     and, in the case of DW_AT_location, regular fields as well).  */

  /* Figure out the bit-distance from the start of the structure to the
     "deepest" bit of the bit-field.  */
  deepest_bitpos = bitpos_int + field_size_in_bits;

  /* This is the tricky part.  Use some fancy footwork to deduce where the
     lowest addressed bit of the containing object must be.  */
  object_offset_in_bits = deepest_bitpos - type_size_in_bits;

  /* Round up to type_align by default.  This works best for bitfields.
     The divide/multiply pair rounds the offset up to the next multiple
     of the type alignment.  */
  object_offset_in_bits += type_align_in_bits - 1;
  object_offset_in_bits /= type_align_in_bits;
  object_offset_in_bits *= type_align_in_bits;

  /* If the rounded-up offset lands beyond the field's own starting bit,
     the type alignment cannot have been honored here.  */
  if (object_offset_in_bits > bitpos_int)
    {
      /* Sigh, the decl must be packed.  */
      object_offset_in_bits = deepest_bitpos - type_size_in_bits;

      /* Round up to decl_align instead.  */
      object_offset_in_bits += decl_align_in_bits - 1;
      object_offset_in_bits /= decl_align_in_bits;
      object_offset_in_bits *= decl_align_in_bits;
    }

  /* Convert the deduced bit offset into a byte offset for the caller.  */
  return object_offset_in_bits / BITS_PER_UNIT;
}
/* The following routines define various Dwarf attributes and any data
associated with them. */
/* Add a location description attribute value to a DIE.

   This emits location attributes suitable for whole variables and
   whole parameters.  Note that the location attributes for struct fields
   are generated by the routine `data_member_location_attribute' below.  */

static inline void
add_AT_location_description (die, attr_kind, descr)
     dw_die_ref die;
     enum dwarf_attribute attr_kind;
     dw_loc_descr_ref descr;
{
  /* A null descriptor means no location could be expressed; in that
     case simply omit the attribute.  */
  if (descr == 0)
    return;

  add_AT_loc (die, attr_kind, descr);
}
/* Attach the specialized form of location attribute used for data members of
   struct and union types.  In the special case of a FIELD_DECL node which
   represents a bit-field, the "offset" part of this special location
   descriptor must indicate the distance in bytes from the lowest-addressed
   byte of the containing struct or union type to the lowest-addressed byte of
   the "containing object" for the bit-field.  (See the `field_byte_offset'
   function above).

   For any given bit-field, the "containing object" is a hypothetical object
   (of some integral or enum type) within which the given bit-field lives.  The
   type of this hypothetical "containing object" is always the same as the
   declared type of the individual bit-field itself (for GCC anyway... the
   DWARF spec doesn't actually mandate this).  Note that it is the size (in
   bytes) of the hypothetical "containing object" which will be given in the
   DW_AT_byte_size attribute for this bit-field.  (See the
   `byte_size_attribute' function below.)  It is also used when calculating the
   value of the DW_AT_bit_offset attribute.  (See the `bit_offset_attribute'
   function below.)  */

static void
add_data_member_location_attribute (die, decl)
     dw_die_ref die;
     tree decl;
{
  long offset;
  dw_loc_descr_ref loc_descr = 0;

  if (TREE_CODE (decl) == TREE_VEC)
    {
      /* We're working on the TAG_inheritance for a base class.  */
      if (TREE_VIA_VIRTUAL (decl) && is_cxx ())
	{
	  /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
	     aren't at a fixed offset from all (sub)objects of the same
	     type.  We need to extract the appropriate offset from our
	     vtable.  The following dwarf expression means

	       BaseAddr = ObAddr + *((*ObAddr) - Offset)

	     This is specific to the V3 ABI, of course.  */
	  dw_loc_descr_ref tmp;

	  /* Make a copy of the object address.  */
	  tmp = new_loc_descr (DW_OP_dup, 0, 0);
	  add_loc_descr (&loc_descr, tmp);

	  /* Extract the vtable address.  */
	  tmp = new_loc_descr (DW_OP_deref, 0, 0);
	  add_loc_descr (&loc_descr, tmp);

	  /* Calculate the address of the offset.  The vbase offset slot is
	     expected to sit at a negative offset from the vtable pointer;
	     anything else indicates a malformed binfo.  */
	  offset = tree_low_cst (BINFO_VPTR_FIELD (decl), 0);
	  if (offset >= 0)
	    abort ();

	  tmp = int_loc_descriptor (-offset);
	  add_loc_descr (&loc_descr, tmp);
	  tmp = new_loc_descr (DW_OP_minus, 0, 0);
	  add_loc_descr (&loc_descr, tmp);

	  /* Extract the offset.  */
	  tmp = new_loc_descr (DW_OP_deref, 0, 0);
	  add_loc_descr (&loc_descr, tmp);

	  /* Add it to the object address.  */
	  tmp = new_loc_descr (DW_OP_plus, 0, 0);
	  add_loc_descr (&loc_descr, tmp);
	}
      else
	offset = tree_low_cst (BINFO_OFFSET (decl), 0);
    }
  else
    offset = field_byte_offset (decl);

  /* When no expression was built above (the common non-virtual-base case),
     express the member location as a simple constant byte offset.  */
  if (! loc_descr)
    {
      enum dwarf_location_atom op;

      /* The DWARF2 standard says that we should assume that the structure
	 address is already on the stack, so we can specify a structure field
	 address by using DW_OP_plus_uconst.  */
#ifdef MIPS_DEBUGGING_INFO
      /* ??? The SGI dwarf reader does not handle the DW_OP_plus_uconst
	 operator correctly.  It works only if we leave the offset on the
	 stack.  */
      op = DW_OP_constu;
#else
      op = DW_OP_plus_uconst;
#endif

      loc_descr = new_loc_descr (op, offset, 0);
    }

  add_AT_loc (die, DW_AT_data_member_location, loc_descr);
}
/* Attach an DW_AT_const_value attribute for a variable or a parameter which
   does not have a "location" either in memory or in a register.  These
   things can arise in GNU C when a constant is passed as an actual parameter
   to an inlined function.  They can also arise in C++ where declared
   constants do not necessarily get memory "homes".  */

static void
add_const_value_attribute (die, rtl)
     dw_die_ref die;
     rtx rtl;
{
  switch (GET_CODE (rtl))
    {
    case CONST_INT:
      /* Note that a CONST_INT rtx could represent either an integer
	 or a floating-point constant.  A CONST_INT is used whenever
	 the constant will fit into a single word.  In all such
	 cases, the original mode of the constant value is wiped
	 out, and the CONST_INT rtx is assigned VOIDmode.  */
      {
	HOST_WIDE_INT val = INTVAL (rtl);

	/* ??? We really should be using HOST_WIDE_INT throughout.
	   The (long) casts below pick the widest emission routine that
	   can represent the value without loss.  */
	if (val < 0 && (long) val == val)
	  add_AT_int (die, DW_AT_const_value, (long) val);
	else if ((unsigned long) val == (unsigned HOST_WIDE_INT) val)
	  add_AT_unsigned (die, DW_AT_const_value, (unsigned long) val);
	else
	  {
	    /* The value does not fit in a host `long'; split it into two
	       halves if HOST_WIDE_INT is exactly two longs wide.  */
#if HOST_BITS_PER_LONG * 2 == HOST_BITS_PER_WIDE_INT
	    add_AT_long_long (die, DW_AT_const_value,
			      val >> HOST_BITS_PER_LONG, val);
#else
	    abort ();
#endif
	  }
      }
      break;

    case CONST_DOUBLE:
      /* Note that a CONST_DOUBLE rtx could represent either an integer or a
	 floating-point constant.  A CONST_DOUBLE is used whenever the
	 constant requires more than one word in order to be adequately
	 represented.  We output CONST_DOUBLEs as blocks.  */
      {
	enum machine_mode mode = GET_MODE (rtl);

	if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	  {
	    /* NOTE(review): the divisor 4 presumably assumes each target
	       word emitted by the REAL_VALUE_TO_TARGET_* macros covers 4
	       bytes of the mode — confirm against the target float format
	       machinery.  */
	    unsigned length = GET_MODE_SIZE (mode) / 4;
	    long *array = (long *) xmalloc (sizeof (long) * length);
	    REAL_VALUE_TYPE rv;

	    REAL_VALUE_FROM_CONST_DOUBLE (rv, rtl);

	    /* Convert the host-side real value into the target's byte
	       representation for the appropriate width.  */
	    switch (mode)
	      {
	      case SFmode:
		REAL_VALUE_TO_TARGET_SINGLE (rv, array[0]);
		break;

	      case DFmode:
		REAL_VALUE_TO_TARGET_DOUBLE (rv, array);
		break;

	      case XFmode:
	      case TFmode:
		REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, array);
		break;

	      default:
		abort ();
	      }

	    add_AT_float (die, DW_AT_const_value, length, array);
	  }
	else
	  {
	    /* ??? We really should be using HOST_WIDE_INT throughout.  */
	    if (HOST_BITS_PER_LONG != HOST_BITS_PER_WIDE_INT)
	      abort ();

	    add_AT_long_long (die, DW_AT_const_value,
			      CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
	  }
      }
      break;

    case CONST_STRING:
      add_AT_string (die, DW_AT_const_value, XSTR (rtl, 0));
      break;

    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      add_AT_addr (die, DW_AT_const_value, rtl);
      /* Record the rtx so the referenced symbol is kept alive.  */
      VARRAY_PUSH_RTX (used_rtx_varray, rtl);
      break;

    case PLUS:
      /* In cases where an inlined instance of an inline function is passed
	 the address of an `auto' variable (which is local to the caller) we
	 can get a situation where the DECL_RTL of the artificial local
	 variable (for the inlining) which acts as a stand-in for the
	 corresponding formal parameter (of the inline function) will look
	 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)).  This is not
	 exactly a compile-time constant expression, but it isn't the address
	 of the (artificial) local variable either.  Rather, it represents the
	 *value* which the artificial local variable always has during its
	 lifetime.  We currently have no way to represent such quasi-constant
	 values in Dwarf, so for now we just punt and generate nothing.  */
      break;

    default:
      /* No other kinds of rtx should be possible here.  */
      abort ();
    }
}
/* Return the RTL to be used as the debug "location" for DECL, or NULL_RTX
   when no usable location can be determined.  */

static rtx
rtl_for_decl_location (decl)
     tree decl;
{
  rtx rtl;

  /* Here we have to decide where we are going to say the parameter "lives"
     (as far as the debugger is concerned).  We only have a couple of
     choices.  GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.

     DECL_RTL normally indicates where the parameter lives during most of the
     activation of the function.  If optimization is enabled however, this
     could be either NULL or else a pseudo-reg.  Both of those cases indicate
     that the parameter doesn't really live anywhere (as far as the code
     generation parts of GCC are concerned) during most of the function's
     activation.  That will happen (for example) if the parameter is never
     referenced within the function.

     We could just generate a location descriptor here for all non-NULL
     non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
     a little nicer than that if we also consider DECL_INCOMING_RTL in cases
     where DECL_RTL is NULL or is a pseudo-reg.

     Note however that we can only get away with using DECL_INCOMING_RTL as
     a backup substitute for DECL_RTL in certain limited cases.  In cases
     where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
     we can be sure that the parameter was passed using the same type as it is
     declared to have within the function, and that its DECL_INCOMING_RTL
     points us to a place where a value of that type is passed.

     In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
     we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
     because in these cases DECL_INCOMING_RTL points us to a value of some
     type which is *different* from the type of the parameter itself.  Thus,
     if we tried to use DECL_INCOMING_RTL to generate a location attribute in
     such cases, the debugger would end up (for example) trying to fetch a
     `float' from a place which actually contains the first part of a
     `double'.  That would lead to really incorrect and confusing
     output at debug-time.

     So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
     in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl).  There
     are a couple of exceptions however.  On little-endian machines we can
     get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
     not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
     an integral type that is smaller than TREE_TYPE (decl).  These cases arise
     when (on a little-endian machine) a non-prototyped function has a
     parameter declared to be of type `short' or `char'.  In such cases,
     TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
     be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
     passed `int' value.  If the debugger then uses that address to fetch
     a `short' or a `char' (on a little-endian machine) the result will be
     the correct data, so we allow for such exceptional cases below.

     Note that our goal here is to describe the place where the given formal
     parameter lives during most of the function's activation (i.e. between the
     end of the prologue and the start of the epilogue).  We'll do that as best
     as we can.  Note however that if the given formal parameter is modified
     sometime during the execution of the function, then a stack backtrace (at
     debug-time) will show the function as having been called with the *new*
     value rather than the value which was originally passed in.  This happens
     rarely enough that it is not a major problem, but it *is* a problem, and
     I'd like to fix it.

     A future version of dwarf2out.c may generate two additional attributes for
     any given DW_TAG_formal_parameter DIE which will describe the "passed
     type" and the "passed location" for the given formal parameter in addition
     to the attributes we now generate to indicate the "declared type" and the
     "active location" for each parameter.  This additional set of attributes
     could be used by debuggers for stack backtraces.  Separately, note that
     sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
     This happens (for example) for inlined-instances of inline function formal
     parameters which are never referenced.  This really shouldn't be
     happening.  All PARM_DECL nodes should get valid non-NULL
     DECL_INCOMING_RTL values, but integrate.c doesn't currently generate these
     values for inlined instances of inline function parameters, so when we see
     such cases, we are just out-of-luck for the time being (until integrate.c
     gets fixed).  */

  /* Use DECL_RTL as the "location" unless we find something better.  */
  rtl = DECL_RTL_IF_SET (decl);

  /* When generating abstract instances, ignore everything except
     constants, symbols living in memory, and symbols living in
     fixed registers.  */
  if (! reload_completed)
    {
      if (rtl
	  && (CONSTANT_P (rtl)
	      || (GET_CODE (rtl) == MEM
		  && CONSTANT_P (XEXP (rtl, 0)))
	      || (GET_CODE (rtl) == REG
		  && TREE_CODE (decl) == VAR_DECL
		  && TREE_STATIC (decl))))
	{
#ifdef ASM_SIMPLIFY_DWARF_ADDR
	  rtl = ASM_SIMPLIFY_DWARF_ADDR (rtl);
#endif
	  return rtl;
	}
      /* Anything else is unusable before reload; report no location.  */
      rtl = NULL_RTX;
    }
  else if (TREE_CODE (decl) == PARM_DECL)
    {
      if (rtl == NULL_RTX || is_pseudo_reg (rtl))
	{
	  tree declared_type = type_main_variant (TREE_TYPE (decl));
	  tree passed_type = type_main_variant (DECL_ARG_TYPE (decl));

	  /* This decl represents a formal parameter which was optimized out.
	     Note that DECL_INCOMING_RTL may be NULL in here, but we handle
	     all cases where (rtl == NULL_RTX) just below.  */
	  if (declared_type == passed_type)
	    rtl = DECL_INCOMING_RTL (decl);
	  else if (! BYTES_BIG_ENDIAN
		   && TREE_CODE (declared_type) == INTEGER_TYPE
		   && (GET_MODE_SIZE (TYPE_MODE (declared_type))
		       <= GET_MODE_SIZE (TYPE_MODE (passed_type))))
	    /* The little-endian "narrower integral type" exception
	       described in the big comment above.  */
	    rtl = DECL_INCOMING_RTL (decl);
	}

      /* If the parm was passed in registers, but lives on the stack, then
	 make a big endian correction if the mode of the type of the
	 parameter is not the same as the mode of the rtl.  */
      /* ??? This is the same series of checks that are made in dbxout.c before
	 we reach the big endian correction code there.  It isn't clear if all
	 of these checks are necessary here, but keeping them all is the safe
	 thing to do.  */
      else if (GET_CODE (rtl) == MEM
	       && XEXP (rtl, 0) != const0_rtx
	       && ! CONSTANT_P (XEXP (rtl, 0))
	       /* Not passed in memory.  */
	       && GET_CODE (DECL_INCOMING_RTL (decl)) != MEM
	       /* Not passed by invisible reference.  */
	       && (GET_CODE (XEXP (rtl, 0)) != REG
		   || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
		   || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
		   || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
#endif
		   )
	       /* Big endian correction check.  */
	       && BYTES_BIG_ENDIAN
	       && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
	       && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl)))
		   < UNITS_PER_WORD))
	{
	  /* On a big-endian target the narrow value sits in the high-order
	     bytes of the word slot; bias the address forward to its start.  */
	  int offset = (UNITS_PER_WORD
			- GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));

	  rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
			     plus_constant (XEXP (rtl, 0), offset));
	}
    }

  if (rtl != NULL_RTX)
    {
      /* Rewrite references to eliminated registers (e.g. frame pointer)
	 into their post-reload equivalents.  */
      rtl = eliminate_regs (rtl, 0, NULL_RTX);

#ifdef LEAF_REG_REMAP
      if (current_function_uses_only_leaf_regs)
	leaf_renumber_regs_insn (rtl);
#endif
    }

  /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
     and will have been substituted directly into all expressions that use it.
     C does not have such a concept, but C++ and other languages do.  */
  else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
    {
      /* If a variable is initialized with a string constant without embedded
	 zeros, build CONST_STRING.  */
      if (TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	{
	  tree arrtype = TREE_TYPE (decl);
	  tree enttype = TREE_TYPE (arrtype);
	  tree domain = TYPE_DOMAIN (arrtype);
	  tree init = DECL_INITIAL (decl);
	  enum machine_mode mode = TYPE_MODE (enttype);

	  /* Only single-byte integral elements qualify, and the declared
	     array length must exactly match the string (including its
	     terminating NUL, and with no embedded NULs).  */
	  if (GET_MODE_CLASS (mode) == MODE_INT && GET_MODE_SIZE (mode) == 1
	      && domain
	      && integer_zerop (TYPE_MIN_VALUE (domain))
	      && compare_tree_int (TYPE_MAX_VALUE (domain),
				   TREE_STRING_LENGTH (init) - 1) == 0
	      && ((size_t) TREE_STRING_LENGTH (init)
		  == strlen (TREE_STRING_POINTER (init)) + 1))
	    rtl = gen_rtx_CONST_STRING (VOIDmode, TREE_STRING_POINTER (init));
	}
      /* If the initializer is something that we know will expand into an
	 immediate RTL constant, expand it now.  Expanding anything else
	 tends to produce unresolved symbols; see debug/5770 and c++/6381.  */
      else if (TREE_CODE (DECL_INITIAL (decl)) == INTEGER_CST
	       || TREE_CODE (DECL_INITIAL (decl)) == REAL_CST)
	{
	  rtl = expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
			     EXPAND_INITIALIZER);

	  /* If expand_expr returns a MEM, it wasn't immediate.  */
	  if (rtl && GET_CODE (rtl) == MEM)
	    abort ();
	}
    }

#ifdef ASM_SIMPLIFY_DWARF_ADDR
  if (rtl)
    rtl = ASM_SIMPLIFY_DWARF_ADDR (rtl);
#endif

  /* If we don't look past the constant pool, we risk emitting a
     reference to a constant pool entry that isn't referenced from
     code, and thus is not emitted.  */
  if (rtl)
    rtl = avoid_constant_pool_reference (rtl);

  return rtl;
}
/* Generate *either* an DW_AT_location attribute or else an DW_AT_const_value
   data attribute for a variable or a parameter.  We generate the
   DW_AT_const_value attribute only in those cases where the given variable
   or parameter does not have a true "location" either in memory or in a
   register.  This can happen (for example) when a constant is passed as an
   actual argument in a call to an inline function.  (It's possible that
   these things can crop up in other ways also.)  Note that one type of
   constant value which can be passed into an inlined function is a constant
   pointer.  This can happen for example if an actual argument in an inlined
   function call evaluates to a compile-time constant address.  */

static void
add_location_or_const_value_attribute (die, decl)
     dw_die_ref die;
     tree decl;
{
  rtx rtl;
  dw_loc_descr_ref descr;

  if (TREE_CODE (decl) == ERROR_MARK)
    return;
  else if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
    abort ();

  rtl = rtl_for_decl_location (decl);
  if (rtl == NULL_RTX)
    return;

  switch (GET_CODE (rtl))
    {
    case ADDRESSOF:
      /* The address of a variable that was optimized away;
	 don't emit anything.  */
      break;

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_STRING:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PLUS:
      /* DECL_RTL could be (plus (reg ...) (const_int ...)) */
      add_const_value_attribute (die, rtl);
      break;

    case MEM:
      if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
	{
	  /* Need loc_descriptor_from_tree since that's where we know
	     how to handle TLS variables.  Want the object's address
	     since the top-level DW_AT_location assumes such.  See
	     the confusion in loc_descriptor for reference.  */
	  descr = loc_descriptor_from_tree (decl, 1);
	}
      else
	{
	  /* NOTE: the REG/SUBREG/CONCAT case labels below deliberately
	     live *inside* this else-block, so those rtx codes jump
	     straight here and share the loc_descriptor call with the
	     non-TLS MEM path.  */
	case REG:
	case SUBREG:
	case CONCAT:
	  descr = loc_descriptor (rtl);
	}
      add_AT_location_description (die, DW_AT_location, descr);
      break;

    default:
      abort ();
    }
}
/* If we don't have a copy of this variable in memory for some reason (such
   as a C++ member constant that doesn't have an out-of-line definition),
   we should tell the debugger about the constant value.  */

static void
tree_add_const_value_attribute (var_die, decl)
     dw_die_ref var_die;
     tree decl;
{
  tree init = DECL_INITIAL (decl);
  tree type = TREE_TYPE (decl);

  /* Only emit a value for read-only, non-volatile decls with an
     initializer.  NOTE(review): the comparison against null_pointer_node
     presumably means "initializer is an absolute constant, valid in any
     context" per initializer_constant_valid_p's return convention —
     confirm against varasm.c.  */
  if (TREE_READONLY (decl) && ! TREE_THIS_VOLATILE (decl) && init
      && initializer_constant_valid_p (init, type) == null_pointer_node)
    /* OK */;
  else
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
      /* Emit in one word if the constant fits, otherwise as a
	 high/low pair.  */
      if (host_integerp (init, 0))
	add_AT_unsigned (var_die, DW_AT_const_value,
			 tree_low_cst (init, 0));
      else
	add_AT_long_long (var_die, DW_AT_const_value,
			  TREE_INT_CST_HIGH (init),
			  TREE_INT_CST_LOW (init));
      break;

    default:;
    }
}
/* Generate an DW_AT_name attribute given some string value to be included as
   the value of the attribute.  */

static inline void
add_name_attribute (die, name_string)
     dw_die_ref die;
     const char *name_string;
{
  /* Omit the attribute entirely for a missing or empty name.  */
  if (name_string == NULL || *name_string == 0)
    return;

  /* Let a language front end pretty up the name first, if it asked to.  */
  if (demangle_name_func)
    name_string = (*demangle_name_func) (name_string);

  add_AT_string (die, DW_AT_name, name_string);
}
/* Given a tree node describing an array bound (either lower or upper) output
   a representation for that bound.  */

static void
add_bound_info (subrange_die, bound_attr, bound)
     dw_die_ref subrange_die;
     enum dwarf_attribute bound_attr;
     tree bound;
{
  switch (TREE_CODE (bound))
    {
    case ERROR_MARK:
      return;

    /* All fixed-bounds are represented by INTEGER_CST nodes.  */
    case INTEGER_CST:
      /* Skip the attribute when the bound equals the language's implicit
	 default (0 for C-family/Java, 1 for Fortran) or doesn't fit in a
	 host word.  */
      if (! host_integerp (bound, 0)
	  || (bound_attr == DW_AT_lower_bound
	      && (((is_c_family () || is_java ()) && integer_zerop (bound))
		  || (is_fortran () && integer_onep (bound)))))
	/* use the default */
	;
      else
	add_AT_unsigned (subrange_die, bound_attr, tree_low_cst (bound, 0));
      break;

    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      /* Look through conversions at the underlying bound expression.  */
      add_bound_info (subrange_die, bound_attr, TREE_OPERAND (bound, 0));
      break;

    case SAVE_EXPR:
      /* If optimization is turned on, the SAVE_EXPRs that describe how to
	 access the upper bound values may be bogus.  If they refer to a
	 register, they may only describe how to get at these values at the
	 points in the generated code right after they have just been
	 computed.  Worse yet, in the typical case, the upper bound values
	 will not even *be* computed in the optimized code (though the
	 number of elements will), so these SAVE_EXPRs are entirely
	 bogus.  In order to compensate for this fact, we check here to see
	 if optimization is enabled, and if so, we don't add an attribute
	 for the (unknown and unknowable) upper bound.  This should not
	 cause too much trouble for existing (stupid?)  debuggers because
	 they have to deal with empty upper bounds location descriptions
	 anyway in order to be able to deal with incomplete array types.
	 Of course an intelligent debugger (GDB?)  should be able to
	 comprehend that a missing upper bound specification in an array
	 type used for a storage class `auto' local array variable
	 indicates that the upper bound is both unknown (at compile- time)
	 and unknowable (at run-time) due to optimization.

	 We assume that a MEM rtx is safe because gcc wouldn't put the
	 value there unless it was going to be used repeatedly in the
	 function, i.e. for cleanups.  */
      if (SAVE_EXPR_RTL (bound)
	  && (! optimize || GET_CODE (SAVE_EXPR_RTL (bound)) == MEM))
	{
	  /* Emit an artificial helper variable DIE holding the bound's
	     location, and point the subrange's bound attribute at it.  */
	  dw_die_ref ctx = lookup_decl_die (current_function_decl);
	  dw_die_ref decl_die = new_die (DW_TAG_variable, ctx, bound);
	  rtx loc = SAVE_EXPR_RTL (bound);

	  /* If the RTL for the SAVE_EXPR is memory, handle the case where
	     it references an outer function's frame.  */
	  if (GET_CODE (loc) == MEM)
	    {
	      rtx new_addr = fix_lexical_addr (XEXP (loc, 0), bound);

	      if (XEXP (loc, 0) != new_addr)
		loc = gen_rtx_MEM (GET_MODE (loc), new_addr);
	    }

	  add_AT_flag (decl_die, DW_AT_artificial, 1);
	  add_type_attribute (decl_die, TREE_TYPE (bound), 1, 0, ctx);
	  add_AT_location_description (decl_die, DW_AT_location,
				       loc_descriptor (loc));
	  add_AT_die_ref (subrange_die, bound_attr, decl_die);
	}

      /* Else leave out the attribute.  */
      break;

    case VAR_DECL:
    case PARM_DECL:
      {
	/* A variable bound: reference the decl's own DIE if it exists.  */
	dw_die_ref decl_die = lookup_decl_die (bound);

	/* ??? Can this happen, or should the variable have been bound
	   first?  Probably it can, since I imagine that we try to create
	   the types of parameters in the order in which they exist in
	   the list, and won't have created a forward reference to a
	   later parameter.  */
	if (decl_die != NULL)
	  add_AT_die_ref (subrange_die, bound_attr, decl_die);
	break;
      }

    default:
      {
	/* Otherwise try to create a stack operation procedure to
	   evaluate the value of the array bound.  */
	dw_die_ref ctx, decl_die;
	dw_loc_descr_ref loc;

	loc = loc_descriptor_from_tree (bound, 0);
	if (loc == NULL)
	  break;

	if (current_function_decl == 0)
	  ctx = comp_unit_die;
	else
	  ctx = lookup_decl_die (current_function_decl);

	/* If we weren't able to find a context, it's most likely the case
	   that we are processing the return type of the function.  So
	   make a SAVE_EXPR to point to it and have the limbo DIE code
	   find the proper die.  The save_expr function doesn't always
	   make a SAVE_EXPR, so do it ourselves.  */
	if (ctx == 0)
	  bound = build (SAVE_EXPR, TREE_TYPE (bound), bound,
			 current_function_decl, NULL_TREE);

	decl_die = new_die (DW_TAG_variable, ctx, bound);
	add_AT_flag (decl_die, DW_AT_artificial, 1);
	add_type_attribute (decl_die, TREE_TYPE (bound), 1, 0, ctx);
	add_AT_loc (decl_die, DW_AT_location, loc);
	add_AT_die_ref (subrange_die, bound_attr, decl_die);
	break;
      }
    }
}
/* Note that the block of subscript information for an array type also
   includes information about the element type of type given array type.  */

static void
add_subscript_info (type_die, type)
     dw_die_ref type_die;
     tree type;
{
#ifndef MIPS_DEBUGGING_INFO
  unsigned dimension_number;
#endif
  tree lower, upper;
  dw_die_ref subrange_die;

  /* The GNU compilers represent multidimensional array types as sequences of
     one dimensional array types whose element types are themselves array
     types.  Here we squish that down, so that each multidimensional array
     type gets only one array_type DIE in the Dwarf debugging info.  The draft
     Dwarf specification say that we are allowed to do this kind of
     compression in C (because there is no difference between an array or
     arrays and a multidimensional array in C) but for other source languages
     (e.g. Ada) we probably shouldn't do this.  */

  /* ??? The SGI dwarf reader fails for multidimensional arrays with a
     const enum type.  E.g. const enum machine_mode insn_operand_mode[2][10].
     We work around this by disabling this feature.  See also
     gen_array_type_die.  */

  /* NOTE: when MIPS_DEBUGGING_INFO is defined the `for' header below is
     compiled out, so the braced body executes exactly once (one subrange
     for the outermost dimension only).  */
#ifndef MIPS_DEBUGGING_INFO
  for (dimension_number = 0;
       TREE_CODE (type) == ARRAY_TYPE;
       type = TREE_TYPE (type), dimension_number++)
#endif
    {
      tree domain = TYPE_DOMAIN (type);

      /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
	 and (in GNU C only) variable bounds.  Handle all three forms
	 here.  */
      subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
      if (domain)
	{
	  /* We have an array type with specified bounds.  */
	  lower = TYPE_MIN_VALUE (domain);
	  upper = TYPE_MAX_VALUE (domain);

	  /* define the index type.  */
	  if (TREE_TYPE (domain))
	    {
	      /* ??? This is probably an Ada unnamed subrange type.  Ignore the
		 TREE_TYPE field.  We can't emit debug info for this
		 because it is an unnamed integral type.  */
	      if (TREE_CODE (domain) == INTEGER_TYPE
		  && TYPE_NAME (domain) == NULL_TREE
		  && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
		  && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
		;
	      else
		add_type_attribute (subrange_die, TREE_TYPE (domain), 0, 0,
				    type_die);
	    }

	  /* ??? If upper is NULL, the array has unspecified length,
	     but it does have a lower bound.  This happens with Fortran
	       dimension arr(N:*)
	     Since the debugger is definitely going to need to know N
	     to produce useful results, go ahead and output the lower
	     bound solo, and hope the debugger can cope.  */

	  add_bound_info (subrange_die, DW_AT_lower_bound, lower);
	  if (upper)
	    add_bound_info (subrange_die, DW_AT_upper_bound, upper);
	}

      /* Otherwise we have an array type with an unspecified length.  The
	 DWARF-2 spec does not say how to handle this; let's just leave out the
	 bounds.  */
    }
}
static void
add_byte_size_attribute (die, tree_node)
dw_die_ref die;
tree tree_node;
{
unsigned size;
switch (TREE_CODE (tree_node))
{
case ERROR_MARK:
size = 0;
break;
case ENUMERAL_TYPE:
case RECORD_TYPE:
case UNION_TYPE:
case QUAL_UNION_TYPE:
size = int_size_in_bytes (tree_node);
break;
case FIELD_DECL:
/* For a data member of a struct or union, the DW_AT_byte_size is
generally given as the number of bytes normally allocated for an
object of the *declared* type of the member itself. This is true
even for bit-fields. */
size = simple_type_size_in_bits (field_type (tree_node)) / BITS_PER_UNIT;
break;
default:
abort ();
}
/* Note that `size' might be -1 when we get to this point. If it is, that
indicates that the byte size of the entity in question is variable. We
have no good way of expressing this fact in Dwarf at the present time,
so just let the -1 pass on through. */
add_AT_unsigned (die, DW_AT_byte_size, size);
}
/* For a FIELD_DECL node which represents a bit-field, output an attribute
which specifies the distance in bits from the highest order bit of the
"containing object" for the bit-field to the highest order bit of the
bit-field itself.
For any given bit-field, the "containing object" is a hypothetical object
(of some integral or enum type) within which the given bit-field lives. The
type of this hypothetical "containing object" is always the same as the
declared type of the individual bit-field itself. The determination of the
exact location of the "containing object" for a bit-field is rather
complicated. It's handled by the `field_byte_offset' function (above).
Note that it is the size (in bytes) of the hypothetical "containing object"
which will be given in the DW_AT_byte_size attribute for this bit-field.
(See `byte_size_attribute' above). */
static inline void
add_bit_offset_attribute (die, decl)
     dw_die_ref die;
     tree decl;
{
  /* Byte offset of the hypothetical "containing object" (see the comment
     block preceding this function).  */
  HOST_WIDE_INT object_offset_in_bytes = field_byte_offset (decl);
  tree type = DECL_BIT_FIELD_TYPE (decl);
  HOST_WIDE_INT bitpos_int;
  HOST_WIDE_INT highest_order_object_bit_offset;
  HOST_WIDE_INT highest_order_field_bit_offset;
  HOST_WIDE_INT unsigned bit_offset;

  /* Must be a field and a bit field.  */
  if (!type
      || TREE_CODE (decl) != FIELD_DECL)
    abort ();

  /* We can't yet handle bit-fields whose offsets are variable, so if we
     encounter such things, just return without generating any attribute
     whatsoever.  Likewise for variable or too large size.  */
  if (! host_integerp (bit_position (decl), 0)
      || ! host_integerp (DECL_SIZE (decl), 1))
    return;

  bitpos_int = int_bit_position (decl);

  /* Note that the bit offset is always the distance (in bits) from the
     highest-order bit of the "containing object" to the highest-order bit of
     the bit-field itself.  Since the "high-order end" of any object or field
     is different on big-endian and little-endian machines, the computation
     below must take account of these differences.  */
  highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
  highest_order_field_bit_offset = bitpos_int;

  if (! BYTES_BIG_ENDIAN)
    {
      /* On little-endian targets, convert the low-order positions recorded
	 by the front end into distances from the high-order end.  */
      highest_order_field_bit_offset += tree_low_cst (DECL_SIZE (decl), 0);
      highest_order_object_bit_offset += simple_type_size_in_bits (type);
    }

  bit_offset
    = (! BYTES_BIG_ENDIAN
       ? highest_order_object_bit_offset - highest_order_field_bit_offset
       : highest_order_field_bit_offset - highest_order_object_bit_offset);

  add_AT_unsigned (die, DW_AT_bit_offset, bit_offset);
}
/* For a FIELD_DECL node which represents a bit field, output an attribute
which specifies the length in bits of the given field. */
/* Add a DW_AT_bit_size attribute to DIE giving the length in bits of the
   bit-field DECL.  DECL must be a FIELD_DECL with a bit-field type.  */
static inline void
add_bit_size_attribute (die, decl)
     dw_die_ref die;
     tree decl;
{
  /* This attribute only makes sense on a bit-field member.  */
  if (! (TREE_CODE (decl) == FIELD_DECL && DECL_BIT_FIELD_TYPE (decl)))
    abort ();

  /* Skip fields whose size is variable or too large to represent.  */
  if (! host_integerp (DECL_SIZE (decl), 1))
    return;

  add_AT_unsigned (die, DW_AT_bit_size, tree_low_cst (DECL_SIZE (decl), 1));
}
/* If the compiled language is ANSI C, then add a 'prototyped'
attribute, if arg types are given for the parameters of a function. */
/* Add a DW_AT_prototyped flag to DIE when the compilation unit's language
   is ANSI C and FUNC_TYPE carries argument type information.  */
static inline void
add_prototyped_attribute (die, func_type)
     dw_die_ref die;
     tree func_type;
{
  /* Only ANSI C distinguishes prototyped from unprototyped functions.  */
  if (get_AT_unsigned (comp_unit_die, DW_AT_language) != DW_LANG_C89)
    return;

  /* An unprototyped declaration records no argument types at all.  */
  if (TYPE_ARG_TYPES (func_type) == NULL)
    return;

  add_AT_flag (die, DW_AT_prototyped, 1);
}
/* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
by looking in either the type declaration or object declaration
equate table. */
static inline void
add_abstract_origin_attribute (die, origin)
     dw_die_ref die;
     tree origin;
{
  /* The DIE previously created for ORIGIN; aborts below if none exists.  */
  dw_die_ref origin_die = NULL;

  if (TREE_CODE (origin) != FUNCTION_DECL)
    {
      /* We may have gotten separated from the block for the inlined
	 function, if we're in an exception handler or some such; make
	 sure that the abstract function has been written out.

	 Doing this for nested functions is wrong, however; functions are
	 distinct units, and our context might not even be inline.  */
      tree fn = origin;

      if (TYPE_P (fn))
	fn = TYPE_STUB_DECL (fn);

      fn = decl_function_context (fn);
      if (fn)
	dwarf2out_abstract_function (fn);
    }

  /* Look ORIGIN up in whichever equate table matches its kind.  */
  if (DECL_P (origin))
    origin_die = lookup_decl_die (origin);
  else if (TYPE_P (origin))
    origin_die = lookup_type_die (origin);

  /* The origin's DIE must have been created already.  */
  if (origin_die == NULL)
    abort ();

  add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
}
/* We do not currently support the pure_virtual attribute. */
/* For a virtual member function FUNC_DECL, add a DW_AT_virtuality
   attribute and (when the vtable index is a constant) a
   DW_AT_vtable_elem_location to DIE.  Non-virtual functions get
   nothing.  (The pure_virtual attribute is not yet supported.)  */
static inline void
add_pure_or_virtual_attribute (die, func_decl)
     dw_die_ref die;
     tree func_decl;
{
  tree vindex = DECL_VINDEX (func_decl);

  /* A null vtable index means the function is not virtual.  */
  if (! vindex)
    return;

  add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);

  if (host_integerp (vindex, 0))
    add_AT_loc (die, DW_AT_vtable_elem_location,
		new_loc_descr (DW_OP_constu,
			       tree_low_cst (vindex, 0),
			       0));

  /* GNU extension: Record what type this method came from originally.  */
  if (debug_info_level > DINFO_LEVEL_TERSE)
    add_AT_die_ref (die, DW_AT_containing_type,
		    lookup_type_die (DECL_CONTEXT (func_decl)));
}
/* Add source coordinate attributes for the given decl. */
/* Add DW_AT_decl_file and DW_AT_decl_line attributes to DIE describing
   where DECL appears in the source.  */
static void
add_src_coords_attributes (die, decl)
     dw_die_ref die;
     tree decl;
{
  /* Map the source file name to its index in the file table.  */
  add_AT_unsigned (die, DW_AT_decl_file,
		   lookup_filename (DECL_SOURCE_FILE (decl)));
  add_AT_unsigned (die, DW_AT_decl_line, DECL_SOURCE_LINE (decl));
}
/* Add an DW_AT_name attribute and source coordinate attribute for the
given decl, but only if it actually has a name. */
static void
add_name_and_src_coords_attributes (die, decl)
     dw_die_ref die;
     tree decl;
{
  tree decl_name;

  decl_name = DECL_NAME (decl);
  if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
    {
      add_name_attribute (die, dwarf2_name (decl, 0));

      /* Compiler-generated decls have no meaningful source location.  */
      if (! DECL_ARTIFICIAL (decl))
	add_src_coords_attributes (die, decl);

      /* Record the mangled linkage name when it differs from the source
	 name, so the debugger can map between the two.  */
      if ((TREE_CODE (decl) == FUNCTION_DECL || TREE_CODE (decl) == VAR_DECL)
	  && TREE_PUBLIC (decl)
	  && DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
	  && !DECL_ABSTRACT (decl))
	add_AT_string (die, DW_AT_MIPS_linkage_name,
		       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
    }

#ifdef VMS_DEBUGGING_INFO
  /* Get the function's name, as described by its RTL.  This may be different
     from the DECL_NAME name used in the source file.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
    {
      add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
		   XEXP (DECL_RTL (decl), 0));
      VARRAY_PUSH_RTX (used_rtx_varray, XEXP (DECL_RTL (decl), 0));
    }
#endif
}
/* Push a new declaration scope. */
/* Push SCOPE as the innermost currently-open declaration scope.
   Balanced by pop_decl_scope.  */
static void
push_decl_scope (scope)
     tree scope;
{
  VARRAY_PUSH_TREE (decl_scope_table, scope);
}
/* Pop a declaration scope. */
/* Pop the innermost declaration scope pushed by push_decl_scope.
   Aborts on an empty scope stack (push/pop mismatch).  */
static inline void
pop_decl_scope ()
{
  if (VARRAY_ACTIVE_SIZE (decl_scope_table) == 0)
    abort ();

  VARRAY_POP (decl_scope_table);
}
/* Return the DIE for the scope that immediately contains this type.
Non-named types get global scope. Named types nested in other
types get their containing scope if it's open, or global scope
otherwise. All other types (i.e. function-local named types) get
the current active scope. */
static dw_die_ref
scope_die_for (t, context_die)
     tree t;
     dw_die_ref context_die;
{
  dw_die_ref scope_die = NULL;
  tree containing_scope;
  int i;

  /* Non-types always go in the current scope.  */
  if (! TYPE_P (t))
    abort ();

  containing_scope = TYPE_CONTEXT (t);

  /* Ignore namespaces for the moment.  */
  if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
    containing_scope = NULL_TREE;

  /* Ignore function type "scopes" from the C frontend.  They mean that
     a tagged type is local to a parmlist of a function declarator, but
     that isn't useful to DWARF.  */
  if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
    containing_scope = NULL_TREE;

  if (containing_scope == NULL_TREE)
    scope_die = comp_unit_die;
  else if (TYPE_P (containing_scope))
    {
      /* For types, we can just look up the appropriate DIE.  But
	 first we check to see if we're in the middle of emitting it
	 so we know where the new DIE should go.  */
      for (i = VARRAY_ACTIVE_SIZE (decl_scope_table) - 1; i >= 0; --i)
	if (VARRAY_TREE (decl_scope_table, i) == containing_scope)
	  break;

      if (i < 0)
	{
	  /* The containing type is not currently open; with full debug
	     info it must already have been emitted.  */
	  if (debug_info_level > DINFO_LEVEL_TERSE
	      && !TREE_ASM_WRITTEN (containing_scope))
	    abort ();

	  /* If none of the current dies are suitable, we get file scope.  */
	  scope_die = comp_unit_die;
	}
      else
	scope_die = lookup_type_die (containing_scope);
    }
  else
    scope_die = context_die;

  return scope_die;
}
/* Returns nonzero if CONTEXT_DIE is internal to a function. */
static inline int
local_scope_p (context_die)
dw_die_ref context_die;
{
for (; context_die; context_die = context_die->die_parent)
if (context_die->die_tag == DW_TAG_inlined_subroutine
|| context_die->die_tag == DW_TAG_subprogram)
return 1;
return 0;
}
/* Returns nonzero if CONTEXT_DIE is a class. */
static inline int
class_scope_p (context_die)
dw_die_ref context_die;
{
return (context_die
&& (context_die->die_tag == DW_TAG_structure_type
|| context_die->die_tag == DW_TAG_union_type));
}
/* Many forms of DIEs require a "type description" attribute. This
routine locates the proper "type descriptor" die for the type given
by 'type', and adds an DW_AT_type attribute below the given die. */
static void
add_type_attribute (object_die, type, decl_const, decl_volatile, context_die)
     dw_die_ref object_die;
     tree type;
     int decl_const;
     int decl_volatile;
     dw_die_ref context_die;
{
  enum tree_code code  = TREE_CODE (type);
  dw_die_ref type_die = NULL;

  /* ??? If this type is an unnamed subrange type of an integral or
     floating-point type, use the inner type.  This is because we have no
     support for unnamed types in base_type_die.  This can happen if this is
     an Ada subrange type.  Correct solution is emit a subrange type die.  */
  if ((code == INTEGER_TYPE || code == REAL_TYPE)
      && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
    type = TREE_TYPE (type), code = TREE_CODE (type);

  if (code == ERROR_MARK
      /* Handle a special case.  For functions whose return type is void, we
	 generate *no* type attribute.  (Note that no object may have type
	 `void', so this only applies to function return types).  */
      || code == VOID_TYPE)
    return;

  /* DECL_CONST / DECL_VOLATILE are OR'd with the qualifiers recorded on
     the type itself, so qualifiers on either the decl or the type yield
     the appropriate modified-type DIE.  */
  type_die = modified_type_die (type,
				decl_const || TYPE_READONLY (type),
				decl_volatile || TYPE_VOLATILE (type),
				context_die);

  if (type_die != NULL)
    add_AT_die_ref (object_die, DW_AT_type, type_die);
}
/* Given a tree pointer to a struct, class, union, or enum type node, return
a pointer to the (string) tag name for the given type, or zero if the type
was declared without a tag. */
/* Return the (string) tag name for the struct, class, union, or enum
   type node TYPE, or zero if the type was declared without a tag.  */
static const char *
type_tag (type)
     tree type;
{
  tree type_name = TYPE_NAME (type);
  tree id = 0;
  const char *tag;

  /* Untagged types have no TYPE_NAME at all.  */
  if (type_name == 0)
    return 0;

  /* Find the IDENTIFIER_NODE for the type name.  */
  if (TREE_CODE (type_name) == IDENTIFIER_NODE)
    id = type_name;
  /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
     a TYPE_DECL node, regardless of whether or not a `typedef' was
     involved.  */
  else if (TREE_CODE (type_name) == TYPE_DECL
	   && ! DECL_IGNORED_P (type_name))
    id = DECL_NAME (type_name);

  if (id == 0)
    return 0;

  /* An empty name is treated the same as no name at all.  */
  tag = IDENTIFIER_POINTER (id);
  return (tag == 0 || *tag == '\0') ? 0 : tag;
}
/* Return the type associated with a data member, make a special check
for bit field types. */
/* Return the declared type of data member MEMBER: for a bit-field that
   is the underlying bit-field type rather than TREE_TYPE.  */
static inline tree
member_declared_type (member)
     tree member;
{
  tree bit_field_type = DECL_BIT_FIELD_TYPE (member);

  if (bit_field_type)
    return bit_field_type;

  return TREE_TYPE (member);
}
/* Get the decl's label, as described by its RTL. This may be different
from the DECL_NAME name used in the source file. */
/* These routines generate the internal representation of the DIE's for
the compilation unit. Debugging information is collected by walking
the declaration trees passed in from dwarf2out_decl(). */
static void
gen_array_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref scope_die = scope_die_for (type, context_die);
  dw_die_ref array_die;
  tree element_type;

  /* ??? The SGI dwarf reader fails for array of array of enum types unless
     the inner array type comes before the outer array type.  Thus we must
     call gen_type_die before we call new_die.  See below also.  */
#ifdef MIPS_DEBUGGING_INFO
  gen_type_die (TREE_TYPE (type), context_die);
#endif

  array_die = new_die (DW_TAG_array_type, scope_die, type);
  add_name_attribute (array_die, type_tag (type));
  equate_type_number_to_die (type, array_die);

  if (TREE_CODE (type) == VECTOR_TYPE)
    {
      /* The frontend feeds us a representation for the vector as a struct
	 containing an array.  Pull out the array type.  */
      type = TREE_TYPE (TYPE_FIELDS (TYPE_DEBUG_REPRESENTATION_TYPE (type)));
      add_AT_flag (array_die, DW_AT_GNU_vector, 1);
    }

#ifdef MIPS_DEBUGGING_INFO
  /* The SGI compilers handle arrays of unknown bound by setting
     AT_declaration and not emitting any subrange DIEs.  */
  if (! TYPE_DOMAIN (type))
    add_AT_unsigned (array_die, DW_AT_declaration, 1);
  else
#endif
    add_subscript_info (array_die, type);

  /* Add representation of the type of the elements of this array type.  */
  element_type = TREE_TYPE (type);

  /* ??? The SGI dwarf reader fails for multidimensional arrays with a
     const enum type.  E.g. const enum machine_mode insn_operand_mode[2][10].
     We work around this by disabling this feature.  See also
     add_subscript_info.  */
#ifndef MIPS_DEBUGGING_INFO
  /* Strip inner array levels: multidimensional arrays were already
     flattened into the subrange DIEs by add_subscript_info.  */
  while (TREE_CODE (element_type) == ARRAY_TYPE)
    element_type = TREE_TYPE (element_type);

  gen_type_die (element_type, context_die);
#endif

  add_type_attribute (array_die, element_type, 0, 0, context_die);
}
/* Generate a DW_TAG_set_type DIE for the (Pascal-style) set type TYPE,
   register it in the type table, and describe its element type.  */
static void
gen_set_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref scope_die = scope_die_for (type, context_die);
  dw_die_ref set_die = new_die (DW_TAG_set_type, scope_die, type);

  equate_type_number_to_die (type, set_die);
  add_type_attribute (set_die, TREE_TYPE (type), 0, 0, context_die);
}
/* Walk through the list of incomplete types again, trying once more to
emit full debugging info for them. */
/* Walk through the list of incomplete types once more, trying again to
   emit full debugging info for each of them.  */
static void
retry_incomplete_types ()
{
  int i = VARRAY_ACTIVE_SIZE (incomplete_types);

  /* Walk from the end toward the front, as the list may grow while we
     emit the entries already on it.  */
  while (--i >= 0)
    gen_type_die (VARRAY_TREE (incomplete_types, i), comp_unit_die);
}
/* Generate a DIE to represent an inlined instance of an enumeration type. */
/* Generate a DIE for an inlined instance of an enumeration type; it
   merely points back at the abstract origin.  */
static void
gen_inlined_enumeration_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref die = new_die (DW_TAG_enumeration_type, context_die, type);

  /* We do not check for TREE_ASM_WRITTEN (type) being set, as the type may
     be incomplete and such types are not marked.  */
  add_abstract_origin_attribute (die, type);
}
/* Generate a DIE to represent an inlined instance of a structure type. */
/* Generate a DIE for an inlined instance of a structure type; it merely
   points back at the abstract origin.  */
static void
gen_inlined_structure_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref die = new_die (DW_TAG_structure_type, context_die, type);

  /* We do not check for TREE_ASM_WRITTEN (type) being set, as the type may
     be incomplete and such types are not marked.  */
  add_abstract_origin_attribute (die, type);
}
/* Generate a DIE to represent an inlined instance of a union type. */
/* Generate a DIE for an inlined instance of a union type; it merely
   points back at the abstract origin.  */
static void
gen_inlined_union_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref die = new_die (DW_TAG_union_type, context_die, type);

  /* We do not check for TREE_ASM_WRITTEN (type) being set, as the type may
     be incomplete and such types are not marked.  */
  add_abstract_origin_attribute (die, type);
}
/* Generate a DIE to represent an enumeration type. Note that these DIEs
include all of the information about the enumeration values also. Each
enumerated type name/value is listed as a child of the enumerated type
DIE. */
static void
gen_enumeration_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref type_die = lookup_type_die (type);

  if (type_die == NULL)
    {
      /* First time we see this enum: create its DIE.  */
      type_die = new_die (DW_TAG_enumeration_type,
			  scope_die_for (type, context_die), type);
      equate_type_number_to_die (type, type_die);
      add_name_attribute (type_die, type_tag (type));
    }
  else if (! TYPE_SIZE (type))
    /* We already emitted a DIE and the type is still incomplete:
       nothing more to add.  */
    return;
  else
    /* The type was previously emitted as a mere declaration and is now
       complete; drop the declaration flag and fill it in below.  */
    remove_AT (type_die, DW_AT_declaration);

  /* Handle a GNU C/C++ extension, i.e. incomplete enum types.  If the
     given enum type is incomplete, do not generate the DW_AT_byte_size
     attribute or the DW_AT_element_list attribute.  */
  if (TYPE_SIZE (type))
    {
      tree link;

      TREE_ASM_WRITTEN (type) = 1;
      add_byte_size_attribute (type_die, type);
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (scope_die_for (type, context_die), type_die);

      /* Emit one DW_TAG_enumerator child per enumeration constant.  */
      for (link = TYPE_FIELDS (type);
	   link != NULL; link = TREE_CHAIN (link))
	{
	  dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);

	  add_name_attribute (enum_die,
			      IDENTIFIER_POINTER (TREE_PURPOSE (link)));

	  if (host_integerp (TREE_VALUE (link),
			     TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (link)))))
	    {
	      /* Choose the signed or unsigned attribute form to match
		 the sign of the constant's value.  */
	      if (tree_int_cst_sgn (TREE_VALUE (link)) < 0)
		add_AT_int (enum_die, DW_AT_const_value,
			    tree_low_cst (TREE_VALUE (link), 0));
	      else
		add_AT_unsigned (enum_die, DW_AT_const_value,
				 tree_low_cst (TREE_VALUE (link), 1));
	    }
	}
    }
  else
    add_AT_flag (type_die, DW_AT_declaration, 1);
}
/* Generate a DIE to represent either a real live formal parameter decl or to
represent just the type of some formal parameter position in some function
type.
Note that this routine is a bit unusual because its argument may be a
..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
node. If it's the former then this function is being called to output a
DIE to represent a formal parameter object (or some inlining thereof). If
it's the latter, then this function is only being called to output a
DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
argument type of some subprogram type. */
static dw_die_ref
gen_formal_parameter_die (node, context_die)
     tree node;
     dw_die_ref context_die;
{
  dw_die_ref parm_die
    = new_die (DW_TAG_formal_parameter, context_die, node);
  tree origin;

  switch (TREE_CODE_CLASS (TREE_CODE (node)))
    {
    case 'd':
      /* NODE is a real ..._DECL (PARM_DECL or an inlined VAR_DECL).  */
      origin = decl_ultimate_origin (node);
      if (origin != NULL)
	add_abstract_origin_attribute (parm_die, origin);
      else
	{
	  add_name_and_src_coords_attributes (parm_die, node);
	  add_type_attribute (parm_die, TREE_TYPE (node),
			      TREE_READONLY (node),
			      TREE_THIS_VOLATILE (node),
			      context_die);
	  if (DECL_ARTIFICIAL (node))
	    add_AT_flag (parm_die, DW_AT_artificial, 1);
	}

      equate_decl_number_to_die (node, parm_die);
      if (! DECL_ABSTRACT (node))
	add_location_or_const_value_attribute (parm_die, node);

      break;

    case 't':
      /* We were called with some kind of a ..._TYPE node; the DIE is
	 only a placeholder carrying the parameter's type.  */
      add_type_attribute (parm_die, node, 0, 0, context_die);
      break;

    default:
      abort ();
    }

  return parm_die;
}
/* Generate a special type of DIE used as a stand-in for a trailing ellipsis
at the end of an (ANSI prototyped) formal parameters list. */
/* Generate a DW_TAG_unspecified_parameters DIE, which stands in for a
   trailing ellipsis at the end of an (ANSI prototyped) parameter list.  */
static void
gen_unspecified_parameters_die (decl_or_type, context_die)
     tree decl_or_type;
     dw_die_ref context_die;
{
  /* The DIE carries no attributes; its mere presence marks the ellipsis.  */
  (void) new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
}
/* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
DW_TAG_unspecified_parameters DIE) to represent the types of the formal
parameters as specified in some function type specification (except for
those which appear as part of a function *definition*). */
static void
gen_formal_types_die (function_or_method_type, context_die)
     tree function_or_method_type;
     dw_die_ref context_die;
{
  tree link;
  tree formal_type = NULL;
  tree first_parm_type;
  tree arg;

  /* Accept a FUNCTION_DECL too; walk its DECL_ARGUMENTS alongside the
     type's argument-type list so artificial parms can be detected.  */
  if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
    {
      arg = DECL_ARGUMENTS (function_or_method_type);
      function_or_method_type = TREE_TYPE (function_or_method_type);
    }
  else
    arg = NULL_TREE;

  first_parm_type = TYPE_ARG_TYPES (function_or_method_type);

  /* Make our first pass over the list of formal parameter types and output a
     DW_TAG_formal_parameter DIE for each one.  */
  for (link = first_parm_type; link; )
    {
      dw_die_ref parm_die;

      formal_type = TREE_VALUE (link);
      /* A trailing void_type_node marks the end of a prototyped list.  */
      if (formal_type == void_type_node)
	break;

      /* Output a (nameless) DIE to represent the formal parameter itself.  */
      parm_die = gen_formal_parameter_die (formal_type, context_die);
      /* The implicit `this' parameter of a method, and any explicitly
	 artificial argument, is marked DW_AT_artificial.  */
      if ((TREE_CODE (function_or_method_type) == METHOD_TYPE
	   && link == first_parm_type)
	  || (arg && DECL_ARTIFICIAL (arg)))
	add_AT_flag (parm_die, DW_AT_artificial, 1);

      link = TREE_CHAIN (link);
      if (arg)
	arg = TREE_CHAIN (arg);
    }

  /* If this function type has an ellipsis, add a
     DW_TAG_unspecified_parameters DIE to the end of the parameter list.  */
  if (formal_type != void_type_node)
    gen_unspecified_parameters_die (function_or_method_type, context_die);

  /* Make our second (and final) pass over the list of formal parameter types
     and output DIEs to represent those types (as necessary).  */
  for (link = TYPE_ARG_TYPES (function_or_method_type);
       link && TREE_VALUE (link);
       link = TREE_CHAIN (link))
    gen_type_die (TREE_VALUE (link), context_die);
}
/* We want to generate the DIE for TYPE so that we can generate the
die for MEMBER, which has been defined; we will need to refer back
to the member declaration nested within TYPE. If we're trying to
generate minimal debug info for TYPE, processing TYPE won't do the
trick; we need to attach the member declaration by hand. */
static void
gen_type_die_for_member (type, member, context_die)
     tree type, member;
     dw_die_ref context_die;
{
  gen_type_die (type, context_die);

  /* If we're trying to avoid duplicate debug info, we may not have
     emitted the member decl for this function.  Emit it now.  */
  if (TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
      && ! lookup_decl_die (member))
    {
      /* MEMBER must be an original declaration, not an inlined copy.  */
      if (decl_ultimate_origin (member))
	abort ();

      /* Emit the member inside TYPE's scope.  */
      push_decl_scope (type);
      if (TREE_CODE (member) == FUNCTION_DECL)
	gen_subprogram_die (member, lookup_type_die (type));
      else
	gen_variable_die (member, lookup_type_die (type));

      pop_decl_scope ();
    }
}
/* Generate the DWARF2 info for the "abstract" instance of a function which we
may later generate inlined and/or out-of-line instances of. */
static void
dwarf2out_abstract_function (decl)
     tree decl;
{
  dw_die_ref old_die;
  tree save_fn;          /* Saved current_function_decl, restored below.  */
  tree context;
  int was_abstract = DECL_ABSTRACT (decl);

  /* Make sure we have the actual abstract inline, not a clone.  */
  decl = DECL_ORIGIN (decl);

  old_die = lookup_decl_die (decl);
  if (old_die && get_AT_unsigned (old_die, DW_AT_inline))
    /* We've already generated the abstract instance.  */
    return;

  /* Be sure we've emitted the in-class declaration DIE (if any) first, so
     we don't get confused by DECL_ABSTRACT.  */
  if (debug_info_level > DINFO_LEVEL_TERSE)
    {
      context = decl_class_context (decl);
      if (context)
	gen_type_die_for_member
	  (context, decl, decl_function_context (decl) ? NULL : comp_unit_die);
    }

  /* Pretend we've just finished compiling this function.  */
  save_fn = current_function_decl;
  current_function_decl = decl;

  /* Temporarily mark the decl abstract so its DIE carries no location
     info, then emit it; undo the flag unless it was already abstract.  */
  set_decl_abstract_flags (decl, 1);
  dwarf2out_decl (decl);
  if (! was_abstract)
    set_decl_abstract_flags (decl, 0);

  current_function_decl = save_fn;
}
/* Generate a DIE to represent a declared function (either file-scope or
block-local). */
static void
gen_subprogram_die (decl, context_die)
     tree decl;
     dw_die_ref context_die;
{
  char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref subr_die;
  rtx fp_reg;
  tree fn_arg_types;
  tree outer_scope;
  dw_die_ref old_die = lookup_decl_die (decl);
  /* Nonzero when we are emitting a mere declaration (no code): either
     DECL is not the function currently being compiled, or the DIE will
     live inside a class DIE.  */
  int declaration = (current_function_decl != decl
		     || class_scope_p (context_die));

  /* It is possible to have both DECL_ABSTRACT and DECLARATION be true if we
     started to generate the abstract instance of an inline, decided to output
     its containing class, and proceeded to emit the declaration of the inline
     from the member list for the class.  If so, DECLARATION takes priority;
     we'll get back to the abstract instance when done with the class.  */

  /* The class-scope declaration DIE must be the primary DIE.  */
  if (origin && declaration && class_scope_p (context_die))
    {
      origin = NULL;
      if (old_die)
	abort ();
    }

  if (origin != NULL)
    {
      /* DECL is an inlined or abstract instance of ORIGIN.  */
      if (declaration && ! local_scope_p (context_die))
	abort ();

      /* Fixup die_parent for the abstract instance of a nested
	 inline function.  */
      if (old_die && old_die->die_parent == NULL)
	add_child_die (context_die, old_die);

      subr_die = new_die (DW_TAG_subprogram, context_die, decl);
      add_abstract_origin_attribute (subr_die, origin);
    }
  else if (old_die)
    {
      /* A DIE for DECL already exists — this is the definition following
	 an earlier declaration.  */
      unsigned file_index = lookup_filename (DECL_SOURCE_FILE (decl));

      if (!get_AT_flag (old_die, DW_AT_declaration)
	  /* We can have a normal definition following an inline one in the
	     case of redefinition of GNU C extern inlines.
	     It seems reasonable to use AT_specification in this case.  */
	  && !get_AT_unsigned (old_die, DW_AT_inline))
	{
	  /* ??? This can happen if there is a bug in the program, for
	     instance, if it has duplicate function definitions.  Ideally,
	     we should detect this case and ignore it.  For now, if we have
	     already reported an error, any error at all, then assume that
	     we got here because of an input error, not a dwarf2 bug.  */
	  if (errorcount)
	    return;
	  abort ();
	}

      /* If the definition comes from the same place as the declaration,
	 maybe use the old DIE.  We always want the DIE for this function
	 that has the *_pc attributes to be under comp_unit_die so the
	 debugger can find it.  We also need to do this for abstract
	 instances of inlines, since the spec requires the out-of-line copy
	 to have the same parent.  For local class methods, this doesn't
	 apply; we just use the old DIE.  */
      if ((old_die->die_parent == comp_unit_die || context_die == NULL)
	  && (DECL_ARTIFICIAL (decl)
	      || (get_AT_unsigned (old_die, DW_AT_decl_file) == file_index
		  && (get_AT_unsigned (old_die, DW_AT_decl_line)
		      == (unsigned) DECL_SOURCE_LINE (decl)))))
	{
	  subr_die = old_die;

	  /* Clear out the declaration attribute and the parm types.  */
	  remove_AT (subr_die, DW_AT_declaration);
	  remove_children (subr_die);
	}
      else
	{
	  /* Create a fresh DIE that refers back to the declaration via
	     DW_AT_specification, adjusting source coordinates if the
	     definition is in a different place.  */
	  subr_die = new_die (DW_TAG_subprogram, context_die, decl);
	  add_AT_die_ref (subr_die, DW_AT_specification, old_die);
	  if (get_AT_unsigned (old_die, DW_AT_decl_file) != file_index)
	    add_AT_unsigned (subr_die, DW_AT_decl_file, file_index);
	  if (get_AT_unsigned (old_die, DW_AT_decl_line)
	      != (unsigned) DECL_SOURCE_LINE (decl))
	    add_AT_unsigned
	      (subr_die, DW_AT_decl_line, DECL_SOURCE_LINE (decl));
	}
    }
  else
    {
      /* First time we see DECL at all: emit name, type, and access
	 attributes from scratch.  */
      subr_die = new_die (DW_TAG_subprogram, context_die, decl);

      if (TREE_PUBLIC (decl))
	add_AT_flag (subr_die, DW_AT_external, 1);

      add_name_and_src_coords_attributes (subr_die, decl);
      if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  add_prototyped_attribute (subr_die, TREE_TYPE (decl));
	  add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
			      0, 0, context_die);
	}

      add_pure_or_virtual_attribute (subr_die, decl);
      if (DECL_ARTIFICIAL (decl))
	add_AT_flag (subr_die, DW_AT_artificial, 1);

      if (TREE_PROTECTED (decl))
	add_AT_unsigned (subr_die, DW_AT_accessibility, DW_ACCESS_protected);
      else if (TREE_PRIVATE (decl))
	add_AT_unsigned (subr_die, DW_AT_accessibility, DW_ACCESS_private);
    }

  if (declaration)
    {
      if (!old_die || !get_AT_unsigned (old_die, DW_AT_inline))
	{
	  add_AT_flag (subr_die, DW_AT_declaration, 1);

	  /* The first time we see a member function, it is in the context of
	     the class to which it belongs.  We make sure of this by emitting
	     the class first.  The next time is the definition, which is
	     handled above.  The two may come from the same source text.  */
	  if (DECL_CONTEXT (decl) || DECL_ABSTRACT (decl))
	    equate_decl_number_to_die (decl, subr_die);
	}
    }
  else if (DECL_ABSTRACT (decl))
    {
      /* Abstract instance of an inline: record how it was declared.  */
      if (DECL_INLINE (decl) && !flag_no_inline)
	{
	  /* ??? Checking DECL_DEFER_OUTPUT is correct for static
	     inline functions, but not for extern inline functions.
	     We can't get this completely correct because information
	     about whether the function was declared inline is not
	     saved anywhere.  */
	  if (DECL_DEFER_OUTPUT (decl))
	    add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_inlined);
	  else
	    add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_inlined);
	}
      else
	add_AT_unsigned (subr_die, DW_AT_inline, DW_INL_declared_not_inlined);

      equate_decl_number_to_die (decl, subr_die);
    }
  else if (!DECL_EXTERNAL (decl))
    {
      /* An actual out-of-line definition: emit the PC range, pubname,
	 arange, and frame-base information.  */
      if (!old_die || !get_AT_unsigned (old_die, DW_AT_inline))
	equate_decl_number_to_die (decl, subr_die);

      ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_BEGIN_LABEL,
				   current_function_funcdef_no);
      add_AT_lbl_id (subr_die, DW_AT_low_pc, label_id);
      ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
				   current_function_funcdef_no);
      add_AT_lbl_id (subr_die, DW_AT_high_pc, label_id);

      add_pubname (decl, subr_die);
      add_arange (decl, subr_die);

#ifdef MIPS_DEBUGGING_INFO
      /* Add a reference to the FDE for this routine.  */
      add_AT_fde_ref (subr_die, DW_AT_MIPS_fde, current_funcdef_fde);
#endif

      /* Define the "frame base" location for this routine.  We use the
	 frame pointer or stack pointer registers, since the RTL for local
	 variables is relative to one of them.  */
      fp_reg
	= frame_pointer_needed ? hard_frame_pointer_rtx : stack_pointer_rtx;
      add_AT_loc (subr_die, DW_AT_frame_base, reg_loc_descriptor (fp_reg));
    }

  /* Now output descriptions of the arguments for this function.  This gets
     (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list
     for a FUNCTION_DECL doesn't indicate cases where there was a trailing
     `...' at the end of the formal parameter list.  In order to find out if
     there was a trailing ellipsis or not, we must instead look at the type
     associated with the FUNCTION_DECL.  This will be a node of type
     FUNCTION_TYPE.  If the chain of type nodes hanging off of this
     FUNCTION_TYPE node ends with a void_type_node then there should *not* be
     an ellipsis at the end.  */

  /* In the case where we are describing a mere function declaration, all we
     need to do here (and all we *can* do here) is to describe the *types* of
     its formal parameters.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    ;
  else if (declaration)
    gen_formal_types_die (decl, subr_die);
  else
    {
      /* Generate DIEs to represent all known formal parameters */
      tree arg_decls = DECL_ARGUMENTS (decl);
      tree parm;

      /* When generating DIEs, generate the unspecified_parameters DIE
	 instead if we come across the arg "__builtin_va_alist" */
      for (parm = arg_decls; parm; parm = TREE_CHAIN (parm))
	if (TREE_CODE (parm) == PARM_DECL)
	  {
	    if (DECL_NAME (parm)
		&& !strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
			    "__builtin_va_alist"))
	      gen_unspecified_parameters_die (parm, subr_die);
	    else
	      gen_decl_die (parm, subr_die);
	  }

      /* Decide whether we need an unspecified_parameters DIE at the end.
	 There are 2 more cases to do this for: 1) the ansi ... declaration -
	 this is detectable when the end of the arg list is not a
	 void_type_node 2) an unprototyped function declaration (not a
	 definition).  This just means that we have no info about the
	 parameters at all.  */
      fn_arg_types = TYPE_ARG_TYPES (TREE_TYPE (decl));
      if (fn_arg_types != NULL)
	{
	  /* this is the prototyped case, check for ...  */
	  if (TREE_VALUE (tree_last (fn_arg_types)) != void_type_node)
	    gen_unspecified_parameters_die (decl, subr_die);
	}
      else if (DECL_INITIAL (decl) == NULL_TREE)
	gen_unspecified_parameters_die (decl, subr_die);
    }

  /* Output Dwarf info for all of the stuff within the body of the function
     (if it has one - it may be just a declaration).  */
  outer_scope = DECL_INITIAL (decl);

  /* OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
     a function.  This BLOCK actually represents the outermost binding contour
     for the function, i.e. the contour in which the function's formal
     parameters and labels get declared.  Curiously, it appears that the front
     end doesn't actually put the PARM_DECL nodes for the current function onto
     the BLOCK_VARS list for this outer scope, but are strung off of the
     DECL_ARGUMENTS list for the function instead.

     The BLOCK_VARS list for the `outer_scope' does provide us with a list of
     the LABEL_DECL nodes for the function however, and we output DWARF info
     for those in decls_for_scope.  Just within the `outer_scope' there will be
     a BLOCK node representing the function's outermost pair of curly braces,
     and any blocks used for the base and member initializers of a C++
     constructor function.  */
  if (! declaration && TREE_CODE (outer_scope) != ERROR_MARK)
    {
      current_function_has_inlines = 0;
      decls_for_scope (outer_scope, subr_die, 0);
    }
}
/* Generate a DIE to represent a declared data object.

   DECL is the VAR_DECL being described; CONTEXT_DIE becomes the parent
   of the new DW_TAG_variable DIE.  Depending on context this emits
   either a mere declaration DIE or a full definition DIE carrying
   location or constant-value information.  */
static void
gen_variable_die (decl, context_die)
     tree decl;
     dw_die_ref context_die;
{
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref var_die = new_die (DW_TAG_variable, context_die, decl);
  /* Any DIE previously created for DECL; nonnull e.g. when this is the
     definition of a C++ class-level static declared earlier.  */
  dw_die_ref old_die = lookup_decl_die (decl);
  /* Nonzero when only a declaration (no storage/location) is wanted:
     external decls and class-scope members.  */
  int declaration = (DECL_EXTERNAL (decl)
		     || class_scope_p (context_die));

  if (origin != NULL)
    add_abstract_origin_attribute (var_die, origin);
  /* Loop unrolling can create multiple blocks that refer to the same
     static variable, so we must test for the DW_AT_declaration flag.
     ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
     copy decls and set the DECL_ABSTRACT flag on them instead of
     sharing them.
     ??? Duplicated blocks have been rewritten to use .debug_ranges.  */
  else if (old_die && TREE_STATIC (decl)
	   && get_AT_flag (old_die, DW_AT_declaration) == 1)
    {
      /* This is a definition of a C++ class level static.  Point the
	 definition DIE at the earlier declaration DIE, and re-emit the
	 source coordinates only where they differ from it.  */
      add_AT_die_ref (var_die, DW_AT_specification, old_die);
      if (DECL_NAME (decl))
	{
	  unsigned file_index = lookup_filename (DECL_SOURCE_FILE (decl));

	  if (get_AT_unsigned (old_die, DW_AT_decl_file) != file_index)
	    add_AT_unsigned (var_die, DW_AT_decl_file, file_index);

	  if (get_AT_unsigned (old_die, DW_AT_decl_line)
	      != (unsigned) DECL_SOURCE_LINE (decl))
	    add_AT_unsigned (var_die, DW_AT_decl_line,
			     DECL_SOURCE_LINE (decl));
	}
    }
  else
    {
      /* A fresh DIE: name, source coordinates, type, and access flags.  */
      add_name_and_src_coords_attributes (var_die, decl);
      add_type_attribute (var_die, TREE_TYPE (decl), TREE_READONLY (decl),
			  TREE_THIS_VOLATILE (decl), context_die);

      if (TREE_PUBLIC (decl))
	add_AT_flag (var_die, DW_AT_external, 1);

      if (DECL_ARTIFICIAL (decl))
	add_AT_flag (var_die, DW_AT_artificial, 1);

      if (TREE_PROTECTED (decl))
	add_AT_unsigned (var_die, DW_AT_accessibility, DW_ACCESS_protected);
      else if (TREE_PRIVATE (decl))
	add_AT_unsigned (var_die, DW_AT_accessibility, DW_ACCESS_private);
    }

  if (declaration)
    add_AT_flag (var_die, DW_AT_declaration, 1);

  /* Class-scope and abstract decls may be referenced again later;
     remember their DIE.  */
  if (class_scope_p (context_die) || DECL_ABSTRACT (decl))
    equate_decl_number_to_die (decl, var_die);

  if (! declaration && ! DECL_ABSTRACT (decl))
    {
      /* Concrete definition: attach a location (or constant value) and
	 publish the name for the pubnames table.  */
      add_location_or_const_value_attribute (var_die, decl);
      add_pubname (decl, var_die);
    }
  else
    tree_add_const_value_attribute (var_die, decl);
}
/* Generate a DIE to represent a label identifier.

   DECL is the LABEL_DECL; CONTEXT_DIE owns the new DW_TAG_label DIE.
   For concrete (non-abstract) labels, DW_AT_low_pc is set to the
   assembler label generated for the label's CODE_LABEL insn.  */
static void
gen_label_die (decl, context_die)
     tree decl;
     dw_die_ref context_die;
{
  tree origin = decl_ultimate_origin (decl);
  dw_die_ref lbl_die = new_die (DW_TAG_label, context_die, decl);
  rtx insn;
  char label[MAX_ARTIFICIAL_LABEL_BYTES];

  if (origin != NULL)
    add_abstract_origin_attribute (lbl_die, origin);
  else
    add_name_and_src_coords_attributes (lbl_die, decl);

  if (DECL_ABSTRACT (decl))
    /* Abstract instance: just remember the DIE for later references.  */
    equate_decl_number_to_die (decl, lbl_die);
  else
    {
      insn = DECL_RTL (decl);

      /* Deleted labels are programmer specified labels which have been
	 eliminated because of various optimisations.  We still emit them
	 here so that it is possible to put breakpoints on them.  */
      if (GET_CODE (insn) == CODE_LABEL
	  || ((GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
	{
	  /* When optimization is enabled (via -O) some parts of the compiler
	     (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
	     represent source-level labels which were explicitly declared by
	     the user.  This really shouldn't be happening though, so catch
	     it if it ever does happen.  */
	  if (INSN_DELETED_P (insn))
	    abort ();

	  ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
	  add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
	}
    }
}
/* Generate a DIE for a lexical block.

   STMT is the BLOCK node, CONTEXT_DIE the enclosing DIE, and DEPTH the
   current block-nesting depth (forwarded to decls_for_scope).  */
static void
gen_lexical_block_die (stmt, context_die, depth)
     tree stmt;
     dw_die_ref context_die;
     int depth;
{
  dw_die_ref block_die = new_die (DW_TAG_lexical_block, context_die, stmt);
  char lbl[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Abstract blocks correspond to no code addresses, so they get no
     pc/range attributes at all.  */
  if (! BLOCK_ABSTRACT (stmt))
    {
      if (BLOCK_FRAGMENT_CHAIN (stmt) == NULL)
	{
	  /* A contiguous block: a simple low_pc/high_pc pair suffices.  */
	  ASM_GENERATE_INTERNAL_LABEL (lbl, BLOCK_BEGIN_LABEL,
				       BLOCK_NUMBER (stmt));
	  add_AT_lbl_id (block_die, DW_AT_low_pc, lbl);
	  ASM_GENERATE_INTERNAL_LABEL (lbl, BLOCK_END_LABEL,
				       BLOCK_NUMBER (stmt));
	  add_AT_lbl_id (block_die, DW_AT_high_pc, lbl);
	}
      else
	{
	  /* The block was split into fragments: emit a DW_AT_ranges list
	     covering the origin block and every fragment, terminated by
	     a NULL entry.  */
	  tree frag;

	  add_AT_range_list (block_die, DW_AT_ranges, add_ranges (stmt));
	  for (frag = BLOCK_FRAGMENT_CHAIN (stmt); frag != NULL;
	       frag = BLOCK_FRAGMENT_CHAIN (frag))
	    add_ranges (frag);
	  add_ranges (NULL);
	}
    }

  decls_for_scope (stmt, block_die, depth);
}
/* Generate a DIE for an inlined subprogram.

   STMT is the BLOCK representing the inlined instance; CONTEXT_DIE is
   its parent and DEPTH the block-nesting depth.  */
static void
gen_inlined_subroutine_die (stmt, context_die, depth)
     tree stmt;
     dw_die_ref context_die;
     int depth;
{
  tree decl = block_ultimate_origin (stmt);
  dw_die_ref subr_die;
  char lbl[MAX_ARTIFICIAL_LABEL_BYTES];

  /* Emit info for the abstract instance first, if we haven't yet.  This
     must happen even when the block itself is abstract; otherwise a DIE
     emitted below (or elsewhere) could end up referring to an origin
     DIE that was never created, and we would crash.  */
  dwarf2out_abstract_function (decl);

  if (BLOCK_ABSTRACT (stmt))
    {
      /* We may get here if we're the outer block of function A that was
	 inlined into function B that was inlined into function C.  When
	 generating debugging info for C, dwarf2out_abstract_function(B)
	 would mark all inlined blocks as abstract, including this one.
	 So, we wouldn't (and shouldn't) expect labels to be generated
	 for this one.  Instead, just emit debugging info for
	 declarations within the block.  This is particularly important
	 in the case of initializers of arguments passed from B to us:
	 if they're statement expressions containing declarations, we
	 wouldn't generate dies for their abstract variables, and then,
	 when generating dies for the real variables, we'd die (pun
	 intended :-)  */
      gen_lexical_block_die (stmt, context_die, depth);
      return;
    }

  subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
  add_abstract_origin_attribute (subr_die, decl);

  /* low_pc/high_pc bracket the concrete inlined body.  */
  ASM_GENERATE_INTERNAL_LABEL (lbl, BLOCK_BEGIN_LABEL, BLOCK_NUMBER (stmt));
  add_AT_lbl_id (subr_die, DW_AT_low_pc, lbl);
  ASM_GENERATE_INTERNAL_LABEL (lbl, BLOCK_END_LABEL, BLOCK_NUMBER (stmt));
  add_AT_lbl_id (subr_die, DW_AT_high_pc, lbl);

  decls_for_scope (stmt, subr_die, depth);
  current_function_has_inlines = 1;
}
/* Generate a DIE for a field in a record or structure.

   DECL is the FIELD_DECL; CONTEXT_DIE is the DIE of the containing
   record/union type.  */
static void
gen_field_die (decl, context_die)
     tree decl;
     dw_die_ref context_die;
{
  dw_die_ref field_die;

  /* Bail out on erroneous fields.  */
  if (TREE_TYPE (decl) == error_mark_node)
    return;

  field_die = new_die (DW_TAG_member, context_die, decl);
  add_name_and_src_coords_attributes (field_die, decl);
  add_type_attribute (field_die, member_declared_type (decl),
		      TREE_READONLY (decl), TREE_THIS_VOLATILE (decl),
		      context_die);

  /* Bit-fields additionally carry byte size, bit size and bit offset.  */
  if (DECL_BIT_FIELD_TYPE (decl))
    {
      add_byte_size_attribute (field_die, decl);
      add_bit_size_attribute (field_die, decl);
      add_bit_offset_attribute (field_die, decl);
    }

  /* Union members all live at offset zero, so only non-union members
     get an explicit data member location.  */
  if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
    add_data_member_location_attribute (field_die, decl);

  if (DECL_ARTIFICIAL (decl))
    add_AT_flag (field_die, DW_AT_artificial, 1);

  /* Record protected/private access; public is the default.  */
  if (TREE_PROTECTED (decl))
    add_AT_unsigned (field_die, DW_AT_accessibility, DW_ACCESS_protected);
  else if (TREE_PRIVATE (decl))
    add_AT_unsigned (field_die, DW_AT_accessibility, DW_ACCESS_private);
}
/* Generate a DIE for a C++ pointer-to-member type.

   TYPE is the OFFSET_TYPE node; CONTEXT_DIE guides scope selection.  */
static void
gen_ptr_to_mbr_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref scope = scope_die_for (type, context_die);
  dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type, scope, type);

  equate_type_number_to_die (type, ptr_die);
  /* DW_AT_containing_type names the class the member belongs to.  */
  add_AT_die_ref (ptr_die, DW_AT_containing_type,
		  lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
  add_type_attribute (ptr_die, TREE_TYPE (type), 0, 0, context_die);
}
/* Generate the DIE for the compilation unit.

   FILENAME is the primary source file name.  Returns the new
   DW_TAG_compile_unit DIE, carrying the name, compilation directory,
   producer string and source-language code.  */
static dw_die_ref
gen_compile_unit_die (filename)
     const char *filename;
{
  /* Map front-end names onto DWARF source-language codes; anything not
     listed here falls back to DW_LANG_C89.  */
  static const struct
  {
    const char *name;
    int lang_code;
  } lang_map[] =
  {
    { "GNU C++", DW_LANG_C_plus_plus },
    { "GNU Ada", DW_LANG_Ada83 },
    { "GNU F77", DW_LANG_Fortran77 },
    { "GNU Pascal", DW_LANG_Pascal83 },
    { "GNU Java", DW_LANG_Java }
  };
  dw_die_ref die = new_die (DW_TAG_compile_unit, NULL, NULL);
  char producer[250];
  const char *wd = getpwd ();
  const char *language_string = lang_hooks.name;
  int language;
  unsigned int i;

  add_name_attribute (die, filename);

  /* A relative file name is interpreted against the compilation
     directory, so record it.  */
  if (wd != NULL && filename[0] != DIR_SEPARATOR)
    add_AT_string (die, DW_AT_comp_dir, wd);

  sprintf (producer, "%s %s", language_string, version_string);
#ifdef MIPS_DEBUGGING_INFO
  /* The MIPS/SGI compilers place the 'cc' command line options in the producer
     string.  The SGI debugger looks for -g, -g1, -g2, or -g3; if they do
     not appear in the producer string, the debugger reaches the conclusion
     that the object file is stripped and has no debugging information.
     To get the MIPS/SGI debugger to believe that there is debugging
     information in the object file, we add a -g to the producer string.  */
  if (debug_info_level > DINFO_LEVEL_TERSE)
    strcat (producer, " -g");
#endif
  add_AT_string (die, DW_AT_producer, producer);

  language = DW_LANG_C89;
  for (i = 0; i < sizeof lang_map / sizeof lang_map[0]; i++)
    if (strcmp (language_string, lang_map[i].name) == 0)
      {
	language = lang_map[i].lang_code;
	break;
      }
  add_AT_unsigned (die, DW_AT_language, language);

  return die;
}
/* Generate a DIE for a string type.

   TYPE is the (char-array) string type; CONTEXT_DIE guides scope
   selection.  */
static void
gen_string_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref str_die
    = new_die (DW_TAG_string_type, scope_die_for (type, context_die), type);

  equate_type_number_to_die (type, str_die);

  /* ??? The string length attribute is fudged (omitted) for now.
     TODO: add string length info.  */
}
/* Generate the DIE for a base class.

   BINFO is the binfo node for the base; CONTEXT_DIE is the DIE of the
   derived class.  */
static void
gen_inheritance_die (binfo, context_die)
     tree binfo;
     dw_die_ref context_die;
{
  dw_die_ref inh_die = new_die (DW_TAG_inheritance, context_die, binfo);

  add_type_attribute (inh_die, BINFO_TYPE (binfo), 0, 0, context_die);
  add_data_member_location_attribute (inh_die, binfo);

  /* Flag virtual bases explicitly.  */
  if (TREE_VIA_VIRTUAL (binfo))
    add_AT_unsigned (inh_die, DW_AT_virtuality, DW_VIRTUALITY_virtual);

  /* Record public or protected derivation; anything else stays
     implicit.  */
  if (TREE_VIA_PUBLIC (binfo))
    add_AT_unsigned (inh_die, DW_AT_accessibility, DW_ACCESS_public);
  else if (TREE_VIA_PROTECTED (binfo))
    add_AT_unsigned (inh_die, DW_AT_accessibility, DW_ACCESS_protected);
}
/* Generate DIEs for the members of a class type.

   TYPE is the record/union type; CONTEXT_DIE is its type DIE.  Emits
   base classes first, then data members and member types, and finally
   member functions.  Note that while emitting these member DIEs we may
   also end up emitting DIEs for member *types*; gen_type_die avoids
   nesting those inside this member list unless a member type's
   TYPE_CONTEXT explicitly marks it as a member of this containing type
   (as the g++ front end does).  */
static void
gen_member_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  tree m;
  dw_die_ref existing;

  /* First output info about the base classes.  */
  if (TYPE_BINFO (type) && TYPE_BINFO_BASETYPES (type))
    {
      tree bases = TYPE_BINFO_BASETYPES (type);
      int i;
      int n_bases = TREE_VEC_LENGTH (bases);

      for (i = 0; i < n_bases; i++)
	gen_inheritance_die (TREE_VEC_ELT (bases, i), context_die);
    }

  /* Then the data members and member types.  If minimal debug info was
     emitted for TYPE earlier and we changed our minds, some member DIEs
     may already exist; splice those into place in the right order
     rather than defining them a second time.  */
  for (m = TYPE_FIELDS (type); m; m = TREE_CHAIN (m))
    {
      existing = lookup_decl_die (m);
      if (existing)
	splice_child_die (context_die, existing);
      else
	gen_decl_die (m, context_die);
    }

  /* Finally the function members, skipping clones.  */
  for (m = TYPE_METHODS (type); m; m = TREE_CHAIN (m))
    {
      if (DECL_ABSTRACT_ORIGIN (m))
	continue;

      existing = lookup_decl_die (m);
      if (existing)
	splice_child_die (context_die, existing);
      else
	gen_decl_die (m, context_die);
    }
}
/* Generate a DIE for a structure or union type.  If TYPE_DECL_SUPPRESS_DEBUG
   is set, we pretend that the type was never defined, so we only get the
   member DIEs needed by later specification DIEs.

   TYPE is the RECORD_TYPE/UNION_TYPE/QUAL_UNION_TYPE node; CONTEXT_DIE
   guides scope selection for the new DIE.  */
static void
gen_struct_or_union_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref type_die = lookup_type_die (type);
  dw_die_ref scope_die = 0;
  int nested = 0;
  /* TYPE counts as complete only when it has a size and debug info for
     its stub decl has not been suppressed.  */
  int complete = (TYPE_SIZE (type)
		  && (! TYPE_STUB_DECL (type)
		      || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));

  /* A DIE already exists and there is no completed definition to add:
     nothing more to do.  */
  if (type_die && ! complete)
    return;

  if (TYPE_CONTEXT (type) != NULL_TREE
      && AGGREGATE_TYPE_P (TYPE_CONTEXT (type)))
    nested = 1;

  scope_die = scope_die_for (type, context_die);

  if (! type_die || (nested && scope_die == comp_unit_die))
    /* First occurrence of type or toplevel definition of nested class.  */
    {
      dw_die_ref old_die = type_die;

      type_die = new_die (TREE_CODE (type) == RECORD_TYPE
			  ? DW_TAG_structure_type : DW_TAG_union_type,
			  scope_die, type);
      equate_type_number_to_die (type, type_die);
      if (old_die)
	add_AT_die_ref (type_die, DW_AT_specification, old_die);
      else
	add_name_attribute (type_die, type_tag (type));
    }
  else
    /* We are completing a previously-emitted declaration DIE in place.  */
    remove_AT (type_die, DW_AT_declaration);

  /* If this type has been completed, then give it a byte_size attribute and
     then give a list of members.  */
  if (complete)
    {
      /* Prevent infinite recursion in cases where the type of some member of
	 this type is expressed in terms of this type itself.  */
      TREE_ASM_WRITTEN (type) = 1;
      add_byte_size_attribute (type_die, type);
      if (TYPE_STUB_DECL (type) != NULL_TREE)
	add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));

      /* If the first reference to this type was as the return type of an
	 inline function, then it may not have a parent.  Fix this now.  */
      if (type_die->die_parent == NULL)
	add_child_die (scope_die, type_die);

      push_decl_scope (type);
      gen_member_die (type, type_die);
      pop_decl_scope ();

      /* GNU extension: Record what type our vtable lives in.  */
      if (TYPE_VFIELD (type))
	{
	  tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));

	  gen_type_die (vtype, context_die);
	  add_AT_die_ref (type_die, DW_AT_containing_type,
			  lookup_type_die (vtype));
	}
    }
  else
    {
      add_AT_flag (type_die, DW_AT_declaration, 1);

      /* Remember incomplete file-scope types so they can be completed
	 later.  We don't need to do this for function-local types.  */
      if (TYPE_STUB_DECL (type)
	  && ! decl_function_context (TYPE_STUB_DECL (type)))
	VARRAY_PUSH_TREE (incomplete_types, type);
    }
}
/* Generate a DIE for a subroutine _type_ (not a particular function).

   TYPE is the FUNCTION_TYPE/METHOD_TYPE node; CONTEXT_DIE guides scope
   selection.  */
static void
gen_subroutine_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  dw_die_ref scope = scope_die_for (type, context_die);
  dw_die_ref subr_die = new_die (DW_TAG_subroutine_type, scope, type);

  equate_type_number_to_die (type, subr_die);
  add_prototyped_attribute (subr_die, type);
  /* A function type's TREE_TYPE is its return type.  */
  add_type_attribute (subr_die, TREE_TYPE (type), 0, 0, context_die);
  gen_formal_types_die (type, subr_die);
}
/* Generate a DIE for a type definition (typedef).

   DECL is the TYPE_DECL; CONTEXT_DIE owns the new DW_TAG_typedef DIE.
   Uses TREE_ASM_WRITTEN on DECL as the "already emitted" marker.  */
static void
gen_typedef_die (decl, context_die)
     tree decl;
     dw_die_ref context_die;
{
  dw_die_ref type_die;
  tree origin;

  if (TREE_ASM_WRITTEN (decl))
    return;

  TREE_ASM_WRITTEN (decl) = 1;
  type_die = new_die (DW_TAG_typedef, context_die, decl);
  origin = decl_ultimate_origin (decl);
  if (origin != NULL)
    add_abstract_origin_attribute (type_die, origin);
  else
    {
      tree type;

      add_name_and_src_coords_attributes (type_die, decl);
      if (DECL_ORIGINAL_TYPE (decl))
	{
	  /* This typedef names a distinct cloned type; the typedef DIE
	     itself serves as the DIE for TREE_TYPE (decl).  The original
	     and the clone must never be the same node, or the recursion
	     in gen_type_die would be broken.  */
	  type = DECL_ORIGINAL_TYPE (decl);

	  if (type == TREE_TYPE (decl))
	    abort ();
	  else
	    equate_type_number_to_die (TREE_TYPE (decl), type_die);
	}
      else
	type = TREE_TYPE (decl);

      add_type_attribute (type_die, type, TREE_READONLY (decl),
			  TREE_THIS_VOLATILE (decl), context_die);
    }

  if (DECL_ABSTRACT (decl))
    equate_decl_number_to_die (decl, type_die);
}
/* Generate a type description DIE.

   Emits the DIE (or set of DIEs) describing TYPE under CONTEXT_DIE,
   recursing into component types as needed.  TREE_ASM_WRITTEN is used
   throughout as the "already emitted" marker that terminates the
   recursion.  */
static void
gen_type_die (type, context_die)
     tree type;
     dw_die_ref context_die;
{
  int need_pop;

  if (type == NULL_TREE || type == error_mark_node)
    return;

  /* A typedef whose TYPE_DECL records an original type is emitted via
     the decl, so the typedef relationship is preserved in the DWARF.  */
  if (TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
    {
      if (TREE_ASM_WRITTEN (type))
	return;

      /* Prevent broken recursion; we can't hand off to the same type.  */
      if (DECL_ORIGINAL_TYPE (TYPE_NAME (type)) == type)
	abort ();

      TREE_ASM_WRITTEN (type) = 1;
      gen_decl_die (TYPE_NAME (type), context_die);
      return;
    }

  /* We are going to output a DIE to represent the unqualified version
     of this type (i.e. without any const or volatile qualifiers) so
     get the main variant (i.e. the unqualified version) of this type
     now.  (Vectors are special because the debugging info is in the
     cloned type itself).  */
  if (TREE_CODE (type) != VECTOR_TYPE)
    type = type_main_variant (type);

  if (TREE_ASM_WRITTEN (type))
    return;

  switch (TREE_CODE (type))
    {
    case ERROR_MARK:
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We must set TREE_ASM_WRITTEN in case this is a recursive type.  This
	 ensures that the gen_type_die recursion will terminate even if the
	 type is recursive.  Recursive types are possible in Ada.  */
      /* ??? We could perhaps do this for all types before the switch
	 statement.  */
      TREE_ASM_WRITTEN (type) = 1;

      /* For these types, all that is required is that we output a DIE (or a
	 set of DIEs) to represent the "basis" type.  */
      gen_type_die (TREE_TYPE (type), context_die);
      break;

    case OFFSET_TYPE:
      /* This code is used for C++ pointer-to-data-member types.
	 Output a description of the relevant class type.  */
      gen_type_die (TYPE_OFFSET_BASETYPE (type), context_die);

      /* Output a description of the type of the object pointed to.  */
      gen_type_die (TREE_TYPE (type), context_die);

      /* Now output a DIE to represent this pointer-to-data-member type
	 itself.  */
      gen_ptr_to_mbr_type_die (type, context_die);
      break;

    case SET_TYPE:
      gen_type_die (TYPE_DOMAIN (type), context_die);
      gen_set_type_die (type, context_die);
      break;

    case FILE_TYPE:
      gen_type_die (TREE_TYPE (type), context_die);
      abort ();			/* No way to represent these in Dwarf yet!  */
      break;

    case FUNCTION_TYPE:
      /* Force out return type (in case it wasn't forced out already).  */
      gen_type_die (TREE_TYPE (type), context_die);
      gen_subroutine_type_die (type, context_die);
      break;

    case METHOD_TYPE:
      /* Force out return type (in case it wasn't forced out already).  */
      gen_type_die (TREE_TYPE (type), context_die);
      gen_subroutine_type_die (type, context_die);
      break;

    case ARRAY_TYPE:
      /* Character arrays flagged as strings become DWARF string types;
	 all other arrays (and vectors, below) become array types.  */
      if (TYPE_STRING_FLAG (type) && TREE_CODE (TREE_TYPE (type)) == CHAR_TYPE)
	{
	  gen_type_die (TREE_TYPE (type), context_die);
	  gen_string_type_die (type, context_die);
	}
      else
	gen_array_type_die (type, context_die);
      break;

    case VECTOR_TYPE:
      gen_array_type_die (type, context_die);
      break;

    case ENUMERAL_TYPE:
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* If this is a nested type whose containing class hasn't been written
	 out yet, writing it out will cover this one, too.  This does not apply
	 to instantiations of member class templates; they need to be added to
	 the containing class as they are generated.  FIXME: This hurts the
	 idea of combining type decls from multiple TUs, since we can't predict
	 what set of template instantiations we'll get.  */
      if (TYPE_CONTEXT (type)
	  && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
	  && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
	{
	  gen_type_die (TYPE_CONTEXT (type), context_die);

	  if (TREE_ASM_WRITTEN (type))
	    return;

	  /* If that failed, attach ourselves to the stub.  */
	  push_decl_scope (TYPE_CONTEXT (type));
	  context_die = lookup_type_die (TYPE_CONTEXT (type));
	  need_pop = 1;
	}
      else
	need_pop = 0;

      if (TREE_CODE (type) == ENUMERAL_TYPE)
	gen_enumeration_type_die (type, context_die);
      else
	gen_struct_or_union_type_die (type, context_die);

      if (need_pop)
	pop_decl_scope ();

      /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
	 it up if it is ever completed.  gen_*_type_die will set it for us
	 when appropriate.  */
      return;

    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      /* No DIEs needed for fundamental types.  */
      break;

    case LANG_TYPE:
      /* No Dwarf representation currently defined.  */
      break;

    default:
      abort ();
    }

  TREE_ASM_WRITTEN (type) = 1;
}
/* Generate a DIE for a tagged type instantiation. */
static void
gen_tagged_type_instantiation_die (type, context_die)
tree type;
dw_die_ref context_die;
{
if (type == NULL_TREE || type == error_mark_node)
return;
/* We are going to output a DIE to represent the unqualified version of
this type (i.e. without any const or volatile qualifiers) so make sure
that we have the main variant (i.e. the unqualified version) of this
type now. */
if (type != type_main_variant (type))
abort ();
/* Do not check TREE_ASM_WRITTEN (type) as it may not be set if this is
an instance of an unresolved type. */
switch (TREE_CODE (type))
{
case ERROR_MARK:
break;
case ENUMERAL_TYPE:
gen_inlined_enumeration_type_die (type, context_die);
break;
case RECORD_TYPE:
gen_inlined_structure_type_die (type, context_die);
break;
case UNION_TYPE:
case QUAL_UNION_TYPE:
gen_inlined_union_type_die (type, context_die);
break;
default:
abort ();
}
}
/* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
   things which are local to the given block.

   STMT is the BLOCK node, CONTEXT_DIE the enclosing DIE, and DEPTH the
   current block-nesting depth (forwarded to the recursive walkers).  */
static void
gen_block_die (stmt, context_die, depth)
     tree stmt;
     dw_die_ref context_die;
     int depth;
{
  int must_output_die = 0;
  tree origin;
  tree decl;
  enum tree_code origin_code;

  /* Ignore blocks never really used to make RTL.  */
  if (stmt == NULL_TREE || !TREE_USED (stmt)
      || (!TREE_ASM_WRITTEN (stmt) && !BLOCK_ABSTRACT (stmt)))
    return;

  /* If the block is one fragment of a non-contiguous block, do not
     process the variables, since they will have been done by the
     origin block.  Do process subblocks.  */
  if (BLOCK_FRAGMENT_ORIGIN (stmt))
    {
      tree sub;

      for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
	gen_block_die (sub, context_die, depth + 1);

      return;
    }

  /* Determine the "ultimate origin" of this block.  This block may be an
     inlined instance of an inlined instance of inline function, so we have
     to trace all of the way back through the origin chain to find out what
     sort of node actually served as the original seed for the creation of
     the current block.  */
  origin = block_ultimate_origin (stmt);
  origin_code = (origin != NULL) ? TREE_CODE (origin) : ERROR_MARK;

  /* Determine if we need to output any Dwarf DIEs at all to represent this
     block.  */
  if (origin_code == FUNCTION_DECL)
    /* The outer scopes for inlinings *must* always be represented.  We
       generate DW_TAG_inlined_subroutine DIEs for them.  (See below.)  */
    must_output_die = 1;
  else
    {
      /* In the case where the current block represents an inlining of the
	 "body block" of an inline function, we must *NOT* output any DIE for
	 this block because we have already output a DIE to represent the whole
	 inlined function scope and the "body block" of any function doesn't
	 really represent a different scope according to ANSI C rules.  So we
	 check here to make sure that this block does not represent a "body
	 block inlining" before trying to set the MUST_OUTPUT_DIE flag.  */
      if (! is_body_block (origin ? origin : stmt))
	{
	  /* Determine if this block directly contains any "significant"
	     local declarations which we will need to output DIEs for.  */
	  if (debug_info_level > DINFO_LEVEL_TERSE)
	    /* We are not in terse mode so *any* local declaration counts
	       as being a "significant" one.  */
	    must_output_die = (BLOCK_VARS (stmt) != NULL);
	  else
	    /* We are in terse mode, so only local (nested) function
	       definitions count as "significant" local declarations.  */
	    for (decl = BLOCK_VARS (stmt);
		 decl != NULL; decl = TREE_CHAIN (decl))
	      if (TREE_CODE (decl) == FUNCTION_DECL
		  && DECL_INITIAL (decl))
		{
		  must_output_die = 1;
		  break;
		}
	}
    }

  /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
     DIE for any block which contains no significant local declarations at
     all.  Rather, in such cases we just call `decls_for_scope' so that any
     needed Dwarf info for any sub-blocks will get properly generated.  Note
     that in terse mode, our definition of what constitutes a "significant"
     local declaration gets restricted to include only inlined function
     instances and local (nested) function definitions.  */
  if (must_output_die)
    {
      if (origin_code == FUNCTION_DECL)
	gen_inlined_subroutine_die (stmt, context_die, depth);
      else
	gen_lexical_block_die (stmt, context_die, depth);
    }
  else
    decls_for_scope (stmt, context_die, depth);
}
/* Generate DIEs for everything declared within scope STMT and,
   recursively, within all of its sub-blocks.

   CONTEXT_DIE is the DIE of the scope; DEPTH the block-nesting depth.  */
static void
decls_for_scope (stmt, context_die, depth)
     tree stmt;
     dw_die_ref context_die;
     int depth;
{
  tree decl;
  tree sub;

  /* Blocks that never made it into RTL contribute nothing.  */
  if (stmt == NULL_TREE || ! TREE_USED (stmt))
    return;

  /* Emit DIEs for the data objects and typedefs declared directly in
     this block (not in any nested sub-block).  Nested function and tag
     DIEs may have been created earlier with a NULL parent; reparent
     those here instead of regenerating them.  */
  for (decl = BLOCK_VARS (stmt); decl != NULL; decl = TREE_CHAIN (decl))
    {
      dw_die_ref die = NULL;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	die = lookup_decl_die (decl);
      else if (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl))
	die = lookup_type_die (TREE_TYPE (decl));

      if (die != NULL && die->die_parent == NULL)
	add_child_die (context_die, die);
      else
	gen_decl_die (decl, context_die);
    }

  /* At -g1 (terse) we are not interested in sub-blocks.  */
  if (debug_info_level <= DINFO_LEVEL_TERSE)
    return;

  /* Recurse into all sub-blocks and the things declared therein.  */
  for (sub = BLOCK_SUBBLOCKS (stmt); sub != NULL; sub = BLOCK_CHAIN (sub))
    gen_block_die (sub, context_die, depth + 1);
}
/* Is this a typedef we can avoid emitting?

   Returns 1 for stub TYPE_DECLs and for the artificial member typedef
   a class carries for its own name; 0 otherwise.  */
static inline int
is_redundant_typedef (decl)
     tree decl;
{
  return (TYPE_DECL_IS_STUB (decl)
	  || (DECL_ARTIFICIAL (decl)
	      && DECL_CONTEXT (decl)
	      && is_tagged_type (DECL_CONTEXT (decl))
	      && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
	      && DECL_NAME (decl)
		 == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl)))));
}
/* Generate Dwarf debug information for a decl described by DECL.

   CONTEXT_DIE becomes the parent of whatever DIE(s) are created.
   Dispatches on TREE_CODE (decl) to the appropriate gen_*_die routine,
   first forcing out any type DIEs the decl's DIE will refer to.  */
static void
gen_decl_die (decl, context_die)
     tree decl;
     dw_die_ref context_die;
{
  tree origin;

  if (DECL_P (decl) && DECL_IGNORED_P (decl))
    return;

  switch (TREE_CODE (decl))
    {
    case ERROR_MARK:
      break;

    case CONST_DECL:
      /* The individual enumerators of an enum type get output when we output
	 the Dwarf representation of the relevant enum type itself.  */
      break;

    case FUNCTION_DECL:
      /* Don't output any DIEs to represent mere function declarations,
	 unless they are class members or explicit block externs.  */
      if (DECL_INITIAL (decl) == NULL_TREE && DECL_CONTEXT (decl) == NULL_TREE
	  && (current_function_decl == NULL_TREE || DECL_ARTIFICIAL (decl)))
	break;

      /* If we're emitting a clone, emit info for the abstract instance.  */
      if (DECL_ORIGIN (decl) != decl)
	dwarf2out_abstract_function (DECL_ABSTRACT_ORIGIN (decl));

      /* If we're emitting an out-of-line copy of an inline function,
	 emit info for the abstract instance and set up to refer to it.  */
      else if (DECL_INLINE (decl) && ! DECL_ABSTRACT (decl)
	       && ! class_scope_p (context_die)
	       /* dwarf2out_abstract_function won't emit a die if this is just
		  a declaration.  We must avoid setting DECL_ABSTRACT_ORIGIN in
		  that case, because that works only if we have a die.  */
	       && DECL_INITIAL (decl) != NULL_TREE)
	{
	  dwarf2out_abstract_function (decl);
	  set_decl_origin_self (decl);
	}

      /* Otherwise we're emitting the primary DIE for this decl.  */
      else if (debug_info_level > DINFO_LEVEL_TERSE)
	{
	  /* Before we describe the FUNCTION_DECL itself, make sure that we
	     have described its return type.  */
	  gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);

	  /* And its virtual context.  */
	  if (DECL_VINDEX (decl) != NULL_TREE)
	    gen_type_die (DECL_CONTEXT (decl), context_die);

	  /* And its containing type.  */
	  origin = decl_class_context (decl);
	  if (origin != NULL_TREE)
	    gen_type_die_for_member (origin, decl, context_die);
	}

      /* Now output a DIE to represent the function itself.  */
      gen_subprogram_die (decl, context_die);
      break;

    case TYPE_DECL:
      /* If we are in terse mode, don't generate any DIEs to represent any
	 actual typedefs.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	break;

      /* In the special case of a TYPE_DECL node representing the declaration
	 of some type tag, if the given TYPE_DECL is marked as having been
	 instantiated from some other (original) TYPE_DECL node (e.g. one which
	 was generated within the original definition of an inline function) we
	 have to generate a special (abbreviated) DW_TAG_structure_type,
	 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here.  */
      if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
	{
	  gen_tagged_type_instantiation_die (TREE_TYPE (decl), context_die);
	  break;
	}

      if (is_redundant_typedef (decl))
	gen_type_die (TREE_TYPE (decl), context_die);
      else
	/* Output a DIE to represent the typedef itself.  */
	gen_typedef_die (decl, context_die);
      break;

    case LABEL_DECL:
      if (debug_info_level >= DINFO_LEVEL_NORMAL)
	gen_label_die (decl, context_die);
      break;

    case VAR_DECL:
      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	break;

      /* Output any DIEs that are needed to specify the type of this data
	 object.  */
      gen_type_die (TREE_TYPE (decl), context_die);

      /* And its containing type.  */
      origin = decl_class_context (decl);
      if (origin != NULL_TREE)
	gen_type_die_for_member (origin, decl, context_die);

      /* Now output the DIE to represent the data object itself.  This gets
	 complicated because of the possibility that the VAR_DECL really
	 represents an inlined instance of a formal parameter for an inline
	 function.  */
      origin = decl_ultimate_origin (decl);
      if (origin != NULL_TREE && TREE_CODE (origin) == PARM_DECL)
	gen_formal_parameter_die (decl, context_die);
      else
	gen_variable_die (decl, context_die);
      break;

    case FIELD_DECL:
      /* Ignore the nameless fields that are used to skip bits but handle C++
	 anonymous unions.  */
      if (DECL_NAME (decl) != NULL_TREE
	  || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE)
	{
	  gen_type_die (member_declared_type (decl), context_die);
	  gen_field_die (decl, context_die);
	}
      break;

    case PARM_DECL:
      gen_type_die (TREE_TYPE (decl), context_die);
      gen_formal_parameter_die (decl, context_die);
      break;

    case NAMESPACE_DECL:
      /* Ignore for now.  */
      break;

    default:
      abort ();
    }
}
static void
mark_limbo_die_list (ptr)
void *ptr ATTRIBUTE_UNUSED;
{
limbo_die_node *node;
for (node = limbo_die_list; node; node = node->next)
ggc_mark_tree (node->created_for);
}
/* Add Ada "use" clause information for the SGI Workshop debugger.

   FILENAME is the source file of the library unit (may be NULL, in
   which case nothing is emitted); CONTEXT_LIST names the context.  */
void
dwarf2out_add_library_unit_info (filename, context_list)
     const char *filename;
     const char *context_list;
{
  dw_die_ref unit_die;
  tree context_list_decl;
  unsigned int file_index;

  if (filename == NULL)
    return;

  unit_die = new_die (DW_TAG_module, comp_unit_die, NULL);
  /* A synthetic LABEL_DECL carries CONTEXT_LIST into the pubnames
     table.  */
  context_list_decl = build_decl (LABEL_DECL, get_identifier (context_list),
				  void_type_node);
  TREE_PUBLIC (context_list_decl) = TRUE;

  add_name_attribute (unit_die, context_list);
  file_index = lookup_filename (filename);
  add_AT_unsigned (unit_die, DW_AT_decl_file, file_index);
  add_pubname (context_list_decl, unit_die);
}
/* Output debug information for global decl DECL.  Called from toplev.c
   after compilation proper has finished.  */
static void
dwarf2out_global_decl (decl)
     tree decl;
{
  /* Function definitions (FUNCTION_DECLs with an initial/body) were
     already handled as they were compiled; what remains to emit here
     are file-scope tentative data objects, file-scope (extern)
     function declarations with no body, and file-scope tagged type
     declarations/definitions not yet forced out.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_INITIAL (decl))
    return;

  dwarf2out_decl (decl);
}
/* Write the debugging output for DECL.  This is the central entry point
   for emitting a DIE for one declaration: decide whether DECL deserves
   debug info at all, pick the initial parent DIE, then hand off to
   gen_decl_die.  */

void
dwarf2out_decl (decl)
     tree decl;
{
  /* By default new DIEs hang directly off the compilation unit; the
     cases below override this for function-scope entities.  */
  dw_die_ref context_die = comp_unit_die;

  switch (TREE_CODE (decl))
    {
    case ERROR_MARK:
      return;

    case FUNCTION_DECL:
      /* Ignore this FUNCTION_DECL if it refers to a builtin declaration of a
	 builtin function.  Explicit programmer-supplied declarations of
	 these same functions should NOT be ignored however.  */
      if (DECL_EXTERNAL (decl) && DECL_BUILT_IN (decl))
	return;

      /* What we would really like to do here is to filter out all mere
	 file-scope declarations of file-scope functions which are never
	 referenced later within this translation unit (and keep all of ones
	 that *are* referenced later on) but we aren't clairvoyant, so we have
	 no idea which functions will be referenced in the future (i.e. later
	 on within the current translation unit).  So here we just ignore all
	 file-scope function declarations which are not also definitions.  If
	 and when the debugger needs to know something about these functions,
	 it will have to hunt around and find the DWARF information associated
	 with the definition of the function.

	 We can't just check DECL_EXTERNAL to find out which FUNCTION_DECL
	 nodes represent definitions and which ones represent mere
	 declarations.  We have to check DECL_INITIAL instead.  That's because
	 the C front-end supports some weird semantics for "extern inline"
	 function definitions.  These can get inlined within the current
	 translation unit (and thus, we need to generate Dwarf info for their
	 abstract instances so that the Dwarf info for the concrete inlined
	 instances can have something to refer to) but the compiler never
	 generates any out-of-lines instances of such things (despite the fact
	 that they *are* definitions).

	 The important point is that the C front-end marks these "extern
	 inline" functions as DECL_EXTERNAL, but we need to generate DWARF for
	 them anyway.  Note that the C++ front-end also plays some similar
	 games for inline function definitions appearing within include files
	 which also contain `#pragma interface' pragmas.  */
      if (DECL_INITIAL (decl) == NULL_TREE)
	return;

      /* If we're a nested function, initially use a parent of NULL; if we're
	 a plain function, this will be fixed up in decls_for_scope.  If
	 we're a method, it will be ignored, since we already have a DIE.  */
      if (decl_function_context (decl)
	  /* But if we're in terse mode, we don't care about scope.  */
	  && debug_info_level > DINFO_LEVEL_TERSE)
	context_die = NULL;
      break;

    case VAR_DECL:
      /* Ignore this VAR_DECL if it refers to a file-scope extern data object
	 declaration and if the declaration was never even referenced from
	 within this entire compilation unit.  We suppress these DIEs in
	 order to save space in the .debug section (by eliminating entries
	 which are probably useless).  Note that we must not suppress
	 block-local extern declarations (whether used or not) because that
	 would screw-up the debugger's name lookup mechanism and cause it to
	 miss things which really ought to be in scope at a given point.  */
      if (DECL_EXTERNAL (decl) && !TREE_USED (decl))
	return;

      /* If we are in terse mode, don't generate any DIEs to represent any
	 variable declarations or definitions.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;
      break;

    case TYPE_DECL:
      /* Don't emit stubs for types unless they are needed by other DIEs.  */
      if (TYPE_DECL_SUPPRESS_DEBUG (decl))
	return;

      /* Don't bother trying to generate any DIEs to represent any of the
	 normal built-in types for the language we are compiling.  */
      if (DECL_SOURCE_LINE (decl) == 0)
	{
	  /* OK, we need to generate one for `bool' so GDB knows what type
	     comparisons have.  */
	  if ((get_AT_unsigned (comp_unit_die, DW_AT_language)
	       == DW_LANG_C_plus_plus)
	      && TREE_CODE (TREE_TYPE (decl)) == BOOLEAN_TYPE
	      && ! DECL_IGNORED_P (decl))
	    modified_type_die (TREE_TYPE (decl), 0, 0, NULL);

	  return;
	}

      /* If we are in terse mode, don't generate any DIEs for types.  */
      if (debug_info_level <= DINFO_LEVEL_TERSE)
	return;

      /* If we're a function-scope tag, initially use a parent of NULL;
	 this will be fixed up in decls_for_scope.  */
      if (decl_function_context (decl))
	context_die = NULL;
      break;

    default:
      return;
    }

  gen_decl_die (decl, context_die);
}
/* Emit a label marking the start of the machine code generated for a
   lexical block.  LINE is unused; BLOCK_NUM identifies the block and is
   folded into the label name.  */

static void
dwarf2out_begin_block (line, block_num)
     unsigned int line ATTRIBUTE_UNUSED;
     unsigned int block_num;
{
  /* Make sure the label lands in the section holding the current
     function's code before emitting it.  */
  function_section (current_function_decl);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, block_num);
}
/* Emit a label marking the end of the machine code generated for a
   lexical block.  LINE is unused; BLOCK_NUM identifies the block and is
   folded into the label name.  */

static void
dwarf2out_end_block (line, block_num)
     unsigned int line ATTRIBUTE_UNUSED;
     unsigned int block_num;
{
  /* Switch to the section holding the current function's code so the
     end label pairs up with the begin label.  */
  function_section (current_function_decl);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, block_num);
}
/* Decide whether debug information for BLOCK may be omitted entirely
   because it contains no instructions.  Blocks that declare nested
   functions or local classes (stub TYPE_DECLs) must never be dropped:
   that would orphan their DIEs, and with scheduling we may end up
   calling them anyway.  Returns nonzero if BLOCK can be ignored.  */

static bool
dwarf2out_ignore_block (block)
     tree block;
{
  tree var = BLOCK_VARS (block);

  while (var)
    {
      if (TREE_CODE (var) == FUNCTION_DECL
	  || (TREE_CODE (var) == TYPE_DECL && TYPE_DECL_IS_STUB (var)))
	return 0;
      var = TREE_CHAIN (var);
    }

  return 1;
}
/* Lookup FILE_NAME (in the list of filenames that we know about here in
   dwarf2out.c) and return its "index".  The index of each (known) filename is
   just a unique number which is associated with only that one filename.  We
   need such numbers for the sake of generating labels (in the .debug_sfnames
   section) and references to those files numbers (in the .debug_srcinfo
   and .debug_macinfo sections).  If the filename given as an argument is not
   found in our current list, add it to the list and assign it the next
   available unique index number.  In order to speed up searches, we remember
   the index of the filename that was looked up last.  This handles the
   majority of all searches.  */

static unsigned
lookup_filename (file_name)
     const char *file_name;
{
  unsigned i;

  /* ??? Why isn't DECL_SOURCE_FILE left null instead.  */
  if (strcmp (file_name, "<internal>") == 0
      || strcmp (file_name, "<built-in>") == 0)
    return 0;

  /* Check to see if the file name that was searched on the previous
     call matches this file name.  If so, return the index.  */
  if (file_table.last_lookup_index != 0)
    if (0 == strcmp (file_name,
		     file_table.table[file_table.last_lookup_index]))
      return file_table.last_lookup_index;

  /* Didn't match the previous lookup, search the table.  */
  for (i = 1; i < file_table.in_use; i++)
    if (strcmp (file_name, file_table.table[i]) == 0)
      {
	file_table.last_lookup_index = i;
	return i;
      }

  /* Prepare to add a new table entry by making sure there is enough space in
     the table to do so.  If not, expand the current table.  */
  if (i == file_table.allocated)
    {
      file_table.allocated = i + FILE_TABLE_INCREMENT;
      file_table.table = (char **)
	xrealloc (file_table.table, file_table.allocated * sizeof (char *));
    }
#ifdef KEY
  // Bug 948
  /* The table can reach this point with no storage at all.  Grow it by
     the normal increment; the previous code set allocated to I, which
     allocated only I slots and left the store to file_table.table[i]
     below one element out of bounds.  */
  if (file_table.allocated == 0)
    {
      file_table.allocated = i + FILE_TABLE_INCREMENT;
      file_table.table = (char **)
	xrealloc (file_table.table, file_table.allocated * sizeof (char *));
    }
#endif

  /* Add the new entry to the end of the filename table.  */
  file_table.table[i] = xstrdup (file_name);
  file_table.in_use = i + 1;
  file_table.last_lookup_index = i;

#ifndef KEY
  // Bug 948
  if (DWARF2_ASM_LINE_DEBUG_INFO)
    {
      /* Tell the assembler about the file/number pairing via .file.  */
      fprintf (asm_out_file, "\t.file %u ", i);
      output_quoted_string (asm_out_file, file_name);
      fputc ('\n', asm_out_file);
    }
#endif

  return i;
}
/* Set up the file_table used by lookup_filename: allocate the initial
   chunk of storage and reserve slot zero, since DWARF file numbers
   begin at 1.  */

static void
init_file_table ()
{
  file_table.table = (char **) xcalloc (FILE_TABLE_INCREMENT, sizeof (char *));
  file_table.allocated = FILE_TABLE_INCREMENT;
  file_table.last_lookup_index = 0;
  /* Entry 0 stays unused so that valid indices start at 1.  */
  file_table.in_use = 1;
}
/* Output a label to mark the beginning of a source code line entry
   and record information relating to this source line, in
   'line_info_table' for later output of the .debug_line section.
   LINE is the source line number, FILENAME the file it belongs to.
   Does nothing below DINFO_LEVEL_NORMAL.  Three paths: let the
   assembler build the line table (.loc), record into the separate
   table for functions placed in their own section, or record into
   the main line_info_table.  */

static void
dwarf2out_source_line (line, filename)
     unsigned int line;
     const char *filename;
{
  if (debug_info_level >= DINFO_LEVEL_NORMAL)
    {
      /* Make sure anything we emit lands in the section holding the
	 current function's code.  */
      function_section (current_function_decl);

      /* If requested, emit something human-readable.  */
      if (flag_debug_asm)
	fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
		 filename, line);

      if (DWARF2_ASM_LINE_DEBUG_INFO)
	{
	  unsigned file_num = lookup_filename (filename);

	  /* Emit the .loc directive understood by GNU as.  */
	  fprintf (asm_out_file, "\t.loc %d %d 0\n", file_num, line);

	  /* Indicate that line number info exists.  */
	  line_info_table_in_use++;

	  /* Indicate that multiple line number tables exist.  */
	  if (DECL_SECTION_NAME (current_function_decl))
	    separate_line_info_table_in_use++;
	}
      else if (DECL_SECTION_NAME (current_function_decl))
	{
	  /* The function lives in its own section, so its lines go into
	     the separate line-info table rather than the main one.  */
	  dw_separate_line_info_ref line_info;

	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, SEPARATE_LINE_CODE_LABEL,
				     separate_line_info_table_in_use);

	  /* expand the line info table if necessary */
	  if (separate_line_info_table_in_use
	      == separate_line_info_table_allocated)
	    {
	      separate_line_info_table_allocated += LINE_INFO_TABLE_INCREMENT;
	      separate_line_info_table
		= (dw_separate_line_info_ref)
		  xrealloc (separate_line_info_table,
			    separate_line_info_table_allocated
			    * sizeof (dw_separate_line_info_entry));
	    }

	  /* Add the new entry at the end of the line_info_table.  */
	  line_info
	    = &separate_line_info_table[separate_line_info_table_in_use++];
	  line_info->dw_file_num = lookup_filename (filename);
	  line_info->dw_line_num = line;
	  line_info->function = current_function_funcdef_no;
	}
      else
	{
	  dw_line_info_ref line_info;

	  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, LINE_CODE_LABEL,
				     line_info_table_in_use);

	  /* Expand the line info table if necessary.  */
	  if (line_info_table_in_use == line_info_table_allocated)
	    {
	      line_info_table_allocated += LINE_INFO_TABLE_INCREMENT;
	      line_info_table
		= (dw_line_info_ref)
		  xrealloc (line_info_table,
			    (line_info_table_allocated
			     * sizeof (dw_line_info_entry)));
	    }

	  /* Add the new entry at the end of the line_info_table.  */
	  line_info = &line_info_table[line_info_table_in_use++];
	  line_info->dw_file_num = lookup_filename (filename);
	  line_info->dw_line_num = line;
	}
    }
}
/* Record the beginning of a new source file.  LINENO is the line in
   the including file from which FILENAME was included.  At -g3
   (DINFO_LEVEL_VERBOSE) this emits a DW_MACINFO_start_file entry —
   or, in the KEY build, forwards the event to the WHIRL front-end
   instead.  */

static void
dwarf2out_start_source_file (lineno, filename)
     unsigned int lineno;
     const char *filename;
{
#ifndef KEY
  if (flag_eliminate_dwarf2_dups && !is_main_source)
    {
      /* Record the beginning of the file for break_out_includes.  */
      dw_die_ref bincl_die;

      bincl_die = new_die (DW_TAG_GNU_BINCL, comp_unit_die, NULL);
      add_AT_string (bincl_die, DW_AT_name, filename);
    }
#endif

  /* Any file start after the first is, by definition, not the main
     source file.  */
  is_main_source = 0;

  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    {
#ifdef KEY
      /* Hand the event to the front-end and skip the direct
	 .debug_macinfo output below.  */
      WFE_Macro_Start_File(lineno, lookup_filename (filename));
      return;
#endif
      named_section_flags (DEBUG_MACINFO_SECTION, SECTION_DEBUG);
      dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
      dw2_asm_output_data_uleb128 (lineno, "Included from line number %d",
				   lineno);
      dw2_asm_output_data_uleb128 (lookup_filename (filename),
				   "Filename we just started");
    }
}
/* Record the end of a source file.  LINENO is unused.  At -g3 this
   emits a DW_MACINFO_end_file entry — or, in the KEY build, forwards
   the event to the WHIRL front-end instead.  */

static void
dwarf2out_end_source_file (lineno)
     unsigned int lineno ATTRIBUTE_UNUSED;
{
#ifndef KEY
  if (flag_eliminate_dwarf2_dups)
    /* Record the end of the file for break_out_includes.  */
    new_die (DW_TAG_GNU_EINCL, comp_unit_die, NULL);
#endif

  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    {
#ifdef KEY
      /* Hand the event to the front-end and skip the direct
	 .debug_macinfo output below.  */
      WFE_Macro_End_File();
      return;
#endif
      named_section_flags (DEBUG_MACINFO_SECTION, SECTION_DEBUG);
      dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
    }
}
/* Called from debug_define in toplev.c.  The `buffer' parameter contains
   the tail part of the directive line, i.e. the part which is past the
   initial whitespace, #, whitespace, directive-name, whitespace part.
   Emits a DW_MACINFO_define entry at -g3 — or, in the SGI_MONGOOSE
   build, forwards the macro definition to the front-end instead.  */

static void
dwarf2out_define (lineno, buffer)
     unsigned lineno ATTRIBUTE_UNUSED;
     const char *buffer ATTRIBUTE_UNUSED;
{
  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    {
#ifdef SGI_MONGOOSE
      /* Hand the event to the front-end and skip the direct
	 .debug_macinfo output below.  */
      WFE_Macro_Define(lineno, buffer);
      return;
#endif
      named_section_flags (DEBUG_MACINFO_SECTION, SECTION_DEBUG);
      dw2_asm_output_data (1, DW_MACINFO_define, "Define macro");
      dw2_asm_output_data_uleb128 (lineno, "At line number %d", lineno);
      dw2_asm_output_nstring (buffer, -1, "The macro");
    }
}
/* Called from debug_undef in toplev.c.  The `buffer' parameter contains
   the tail part of the directive line, i.e. the part which is past the
   initial whitespace, #, whitespace, directive-name, whitespace part.
   Emits a DW_MACINFO_undef entry at -g3 — or, in the SGI_MONGOOSE
   build, forwards the #undef to the front-end instead.  */

static void
dwarf2out_undef (lineno, buffer)
     unsigned lineno ATTRIBUTE_UNUSED;
     const char *buffer ATTRIBUTE_UNUSED;
{
  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    {
#ifdef SGI_MONGOOSE
      /* Hand the event to the front-end and skip the direct
	 .debug_macinfo output below.  */
      WFE_Macro_Undef(lineno, buffer);
      return;
#endif
      named_section_flags (DEBUG_MACINFO_SECTION, SECTION_DEBUG);
      dw2_asm_output_data (1, DW_MACINFO_undef, "Undefine macro");
      dw2_asm_output_data_uleb128 (lineno, "At line number %d", lineno);
      dw2_asm_output_nstring (buffer, -1, "The macro");
    }
}
/* Set up for Dwarf output at the start of compilation.
   MAIN_INPUT_FILENAME is the name of the primary source file.
   Allocates the growable tables, creates the compile-unit DIE,
   generates the internal section labels, and switches through the
   debug sections once so that each begins with its label.  */

static void
dwarf2out_init (main_input_filename)
     const char *main_input_filename;
{
  init_file_table ();

  /* Remember the name of the primary input file.  */
  primary_filename = main_input_filename;

  /* Add it to the file table first, under the assumption that we'll
     be emitting line number data for it first, which avoids having
     to add an initial DW_LNS_set_file.  */
  lookup_filename (main_input_filename);

  /* Allocate the initial hunk of the decl_die_table.  */
  decl_die_table
    = (dw_die_ref *) xcalloc (DECL_DIE_TABLE_INCREMENT, sizeof (dw_die_ref));
  decl_die_table_allocated = DECL_DIE_TABLE_INCREMENT;
  decl_die_table_in_use = 0;

  /* Allocate the initial hunk of the decl_scope_table.  */
  VARRAY_TREE_INIT (decl_scope_table, 256, "decl_scope_table");

  /* Allocate the initial hunk of the abbrev_die_table.  */
  abbrev_die_table
    = (dw_die_ref *) xcalloc (ABBREV_DIE_TABLE_INCREMENT,
			      sizeof (dw_die_ref));
  abbrev_die_table_allocated = ABBREV_DIE_TABLE_INCREMENT;
  /* Zero-th entry is allocated, but unused.  */
  abbrev_die_table_in_use = 1;

  /* Allocate the initial hunk of the line_info_table.  */
  line_info_table
    = (dw_line_info_ref) xcalloc (LINE_INFO_TABLE_INCREMENT,
				  sizeof (dw_line_info_entry));
  line_info_table_allocated = LINE_INFO_TABLE_INCREMENT;
  /* Zero-th entry is allocated, but unused.  */
  line_info_table_in_use = 1;

  /* Generate the initial DIE for the .debug section.  Note that the (string)
     value given in the DW_AT_name attribute of the DW_TAG_compile_unit DIE
     will (typically) be a relative pathname and that this pathname should be
     taken as being relative to the directory from which the compiler was
     invoked when the given (base) source file was compiled.  */
  comp_unit_die = gen_compile_unit_die (main_input_filename);
  is_main_source = 1;

  VARRAY_TREE_INIT (incomplete_types, 64, "incomplete_types");

  VARRAY_RTX_INIT (used_rtx_varray, 32, "used_rtx_varray");

  /* Keep the limbo DIE list alive across garbage collections.  */
  ggc_add_root (&limbo_die_list, 1, 1, mark_limbo_die_list);

  /* Generate the internal labels used for the various sections.  */
  ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
			       DEBUG_ABBREV_SECTION_LABEL, 0);
  if (DWARF2_GENERATE_TEXT_SECTION_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
  else
    strcpy (text_section_label, stripattributes (TEXT_SECTION_NAME));

  ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
			       DEBUG_INFO_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
			       DEBUG_LINE_SECTION_LABEL, 0);
  ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
			       DEBUG_RANGES_SECTION_LABEL, 0);

  /* Switch through each debug section once so that each begins with
     its label.  */
  named_section_flags (DEBUG_ABBREV_SECTION, SECTION_DEBUG);
  ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
  named_section_flags (DEBUG_INFO_SECTION, SECTION_DEBUG);
  ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
  named_section_flags (DEBUG_LINE_SECTION, SECTION_DEBUG);
  ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);

  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    {
      /* Macro information is only emitted at -g3 and above.  */
      named_section_flags (DEBUG_MACINFO_SECTION, SECTION_DEBUG);
      ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
				   DEBUG_MACINFO_SECTION_LABEL, 0);
      ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
    }

  if (DWARF2_GENERATE_TEXT_SECTION_LABEL)
    {
      text_section ();
      ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
    }
}
/* Allocate a string in .debug_str hash table. */
static hashnode
indirect_string_alloc (tab)
hash_table *tab ATTRIBUTE_UNUSED;
{
struct indirect_string_node *node;
node = xmalloc (sizeof (struct indirect_string_node));
node->refcount = 0;
node->form = 0;
node->label = NULL;
return (hashnode) node;
}
/* A helper function for dwarf2out_finish, called through ht_forall.
   Emit one queued .debug_str string for entry H: write its label and
   its NUL-terminated contents into the string section.  Only entries
   actually referenced via DW_FORM_strp are emitted.  PFILE and V are
   unused.  Always returns 1 so the traversal continues.  */

static int
output_indirect_string (pfile, h, v)
     struct cpp_reader *pfile ATTRIBUTE_UNUSED;
     hashnode h;
     const PTR v ATTRIBUTE_UNUSED;
{
  struct indirect_string_node *n = (struct indirect_string_node *) h;

  if (n->form != DW_FORM_strp)
    return 1;

  named_section_flags (DEBUG_STR_SECTION, DEBUG_STR_SECTION_FLAGS);
  ASM_OUTPUT_LABEL (asm_out_file, n->label);
  assemble_string ((const char *) HT_STR (&n->id),
		   HT_LEN (&n->id) + 1);
  return 1;
}
/* Output stuff that dwarf requires at the end of every file,
   and generate the DWARF-2 debugging info.  INPUT_FILENAME is unused.
   Re-parents the limbo DIEs, retries incomplete types, optionally
   breaks out per-include compilation units, then writes every debug
   section in order.  */

static void
dwarf2out_finish (input_filename)
     const char *input_filename ATTRIBUTE_UNUSED;
{
  limbo_die_node *node, *next_node;
  dw_die_ref die = 0;

  /* Add the name of the compile directory, but only if some recorded
     filename is relative — absolute paths don't need it.  */
  if (get_AT (comp_unit_die, DW_AT_comp_dir) == NULL)
    {
      char *wd = getpwd ();
      unsigned i;

      if (wd != NULL)
	{
	  for (i = 1; i < file_table.in_use; i++)
	    if (file_table.table[i][0] != DIR_SEPARATOR)
	      {
		add_AT_string (comp_unit_die, DW_AT_comp_dir, wd);
		break;
	      }
	}
    }

  /* Traverse the limbo die list, and add parent/child links.  The only
     dies without parents that should be here are concrete instances of
     inline functions, and the comp_unit_die.  We can ignore the comp_unit_die.
     For concrete instances, we can get the parent die from the abstract
     instance.  */
  for (node = limbo_die_list; node; node = next_node)
    {
      next_node = node->next;
      die = node->die;

      if (die->die_parent == NULL)
	{
	  dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
	  tree context;

	  if (origin)
	    add_child_die (origin->die_parent, die);
	  else if (die == comp_unit_die)
	    ;
	  /* If this was an expression for a bound involved in a function
	     return type, it may be a SAVE_EXPR for which we weren't able
	     to find a DIE previously.  So try now.  */
	  else if (node->created_for
		   && TREE_CODE (node->created_for) == SAVE_EXPR
		   && 0 != (origin = (lookup_decl_die
				      (SAVE_EXPR_CONTEXT
				       (node->created_for)))))
	    add_child_die (origin, die);
	  else if (errorcount > 0 || sorrycount > 0)
	    /* It's OK to be confused by errors in the input.  */
	    add_child_die (comp_unit_die, die);
	  else if (node->created_for
		   && ((DECL_P (node->created_for)
			&& (context = DECL_CONTEXT (node->created_for)))
		       || (TYPE_P (node->created_for)
			   && (context = TYPE_CONTEXT (node->created_for))))
		   && TREE_CODE (context) == FUNCTION_DECL)
	    {
	      /* In certain situations, the lexical block containing a
		 nested function can be optimized away, which results
		 in the nested function die being orphaned.  Likewise
		 with the return type of that nested function.  Force
		 this to be a child of the containing function.  */
	      origin = lookup_decl_die (context);
	      if (! origin)
		abort ();
	      add_child_die (origin, die);
	    }
	  else
	    abort ();
	}

      free (node);
    }

  limbo_die_list = NULL;

  /* Walk through the list of incomplete types again, trying once more to
     emit full debugging info for them.  */
  retry_incomplete_types ();

  /* We need to reverse all the dies before break_out_includes, or
     we'll see the end of an include file before the beginning.  */
  reverse_all_dies (comp_unit_die);

  /* Generate separate CUs for each of the include files we've seen.
     They will go into limbo_die_list.  */
  if (flag_eliminate_dwarf2_dups)
    break_out_includes (comp_unit_die);

  /* Traverse the DIE's and add sibling attributes to those DIE's
     that have children.  */
  add_sibling_attributes (comp_unit_die);
  for (node = limbo_die_list; node; node = node->next)
    add_sibling_attributes (node->die);

  /* Output a terminator label for the .text section.  */
  text_section ();
  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, TEXT_END_LABEL, 0);

  /* Output the source line correspondence table.  We must do this
     even if there is no line information.  Otherwise, on an empty
     translation unit, we will generate a present, but empty,
     .debug_info section.  IRIX 6.5 `nm' will then complain when
     examining the file.  */
  if (! DWARF2_ASM_LINE_DEBUG_INFO)
    {
      named_section_flags (DEBUG_LINE_SECTION, SECTION_DEBUG);
      output_line_info ();
    }

  /* Output location list section if necessary.  */
  if (have_location_lists)
    {
      /* Output the location lists info.  NOTE(review): `die' here
	 holds the last die taken off the limbo list above (or 0 if
	 the list was empty) — confirm this is the intended root for
	 the location-list walk.  */
      named_section_flags (DEBUG_LOC_SECTION, SECTION_DEBUG);
      ASM_GENERATE_INTERNAL_LABEL (loc_section_label,
				   DEBUG_LOC_SECTION_LABEL, 0);
      ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
      output_location_lists (die);
      have_location_lists = 0;
    }

  /* We can only use the low/high_pc attributes if all of the code was
     in .text.  */
  if (separate_line_info_table_in_use == 0)
    {
      add_AT_lbl_id (comp_unit_die, DW_AT_low_pc, text_section_label);
      add_AT_lbl_id (comp_unit_die, DW_AT_high_pc, text_end_label);
    }

  /* If it wasn't, we need to give .debug_loc and .debug_ranges an appropriate
     "base address".  Use zero so that these addresses become absolute.  */
  else if (have_location_lists || ranges_table_in_use)
    add_AT_addr (comp_unit_die, DW_AT_entry_pc, const0_rtx);

  if (debug_info_level >= DINFO_LEVEL_NORMAL)
    add_AT_lbl_offset (comp_unit_die, DW_AT_stmt_list,
		       debug_line_section_label);

  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    add_AT_lbl_offset (comp_unit_die, DW_AT_macro_info, macinfo_section_label);

  /* Output all of the compilation units.  We put the main one last so that
     the offsets are available to output_pubnames.  */
  for (node = limbo_die_list; node; node = node->next)
    output_comp_unit (node->die, 0);

  output_comp_unit (comp_unit_die, 0);

  /* Output the abbreviation table.  */
  named_section_flags (DEBUG_ABBREV_SECTION, SECTION_DEBUG);
  output_abbrev_section ();

  /* Output public names table if necessary.  */
  if (pubname_table_in_use)
    {
      named_section_flags (DEBUG_PUBNAMES_SECTION, SECTION_DEBUG);
      output_pubnames ();
    }

  /* Output the address range information.  We only put functions in the
     arange table, so don't write it out if we don't have any.  */
  if (fde_table_in_use)
    {
      named_section_flags (DEBUG_ARANGES_SECTION, SECTION_DEBUG);
      output_aranges ();
    }

  /* Output ranges section if necessary.  */
  if (ranges_table_in_use)
    {
      named_section_flags (DEBUG_RANGES_SECTION, SECTION_DEBUG);
      ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
      output_ranges ();
    }

  /* Have to end the primary source file.  */
  if (debug_info_level >= DINFO_LEVEL_VERBOSE)
    {
      named_section_flags (DEBUG_MACINFO_SECTION, SECTION_DEBUG);
      dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
      dw2_asm_output_data (1, 0, "End compilation unit");
    }

  /* If we emitted any DW_FORM_strp form attribute, output the string
     table too.  */
  if (debug_str_hash)
    ht_forall (debug_str_hash, output_indirect_string, NULL);
}
#else
/* This should never be used, but its address is needed for comparisons. */
const struct gcc_debug_hooks dwarf2_debug_hooks;
#endif /* DWARF2_DEBUGGING_INFO */
#include "gt-dwarf2out.h"
|