text
stringlengths 3
1.05M
|
|---|
import { JSONResponse } from '../utils/json-response.js'
import * as cluster from '../cluster.js'
import { validate } from '../utils/auth-v1.js'
import { parseCidPinning } from '../utils/utils.js'
import { toPinsResponse } from '../utils/db-transforms.js'
/**
 * Replace an existing pin with a new CID (Pinning Service API "replace").
 * Validates the request body, pins the new CID on the cluster, records the
 * new upload, then deletes the pin being replaced.
 * @type {import('../utils/router.js').Handler}
 */
export async function pinsReplace(event, ctx) {
  const { db, user, key } = await validate(event, ctx)
  const existingCid = ctx.params.requestid

  // The pin being replaced must exist and belong to the requesting user.
  const existingUpload = await db.getUpload(existingCid, user.id)
  if (!existingUpload) {
    return new JSONResponse(
      { error: { reason: 'NOT_FOUND', details: 'pin not found' } },
      { status: 404 }
    )
  }

  /** @type {import('../bindings').PinsAddInput} */
  const pinData = await event.request.json()

  // validate CID
  const cid = parseCidPinning(pinData.cid)
  if (!cid) {
    return new JSONResponse(
      {
        error: {
          reason: 'INVALID_PIN_DATA',
          details: `Invalid request id: ${pinData.cid}`,
        },
      },
      { status: 400 }
    )
  }

  // Replacing a pin with the same CID is a no-op and therefore an error.
  if (pinData.cid === existingCid) {
    return new JSONResponse(
      {
        error: {
          reason: 'INVALID_PIN_DATA',
          // Fixed typo: was "exiting and replacement CID are the same".
          details: 'existing and replacement CID are the same',
        },
      },
      { status: 400 }
    )
  }

  // validate name
  if (pinData.name && typeof pinData.name !== 'string') {
    return new JSONResponse(
      { error: { reason: 'INVALID_PIN_DATA', details: 'invalid name' } },
      { status: 400 }
    )
  }

  let meta
  // validate meta: must be a plain (non-array) object; keep string values only.
  if (pinData.meta) {
    if (typeof pinData.meta !== 'object' || Array.isArray(pinData.meta)) {
      return new JSONResponse(
        { error: { reason: 'INVALID_PIN_DATA', details: 'invalid metadata' } },
        { status: 400 }
      )
    }
    meta = Object.fromEntries(
      Object.entries(pinData.meta).filter(([, v]) => typeof v === 'string')
    )
  }

  // Bug fix: pass the sanitized `meta` (string-valued entries only) rather
  // than the raw, unvalidated pinData.meta, which made the filtering above
  // dead code.
  await cluster.pin(cid.contentCid, {
    origins: pinData.origins,
    name: pinData.name,
    metadata: meta,
  })

  const upload = await db.createUpload({
    type: 'Remote',
    content_cid: cid.contentCid,
    source_cid: cid.sourceCid,
    user_id: user.id,
    key_id: key?.id,
    origins: pinData.origins,
    meta,
    name: pinData.name,
  })

  // If the cluster already reports a pin error, ask it to retry.
  if (upload.content.pin[0].status === 'PinError') {
    await cluster.recover(upload.content_cid)
  }

  // Replacement succeeded: remove the pin that was replaced.
  await db.deleteUpload(existingCid, user.id)

  return new JSONResponse(toPinsResponse(upload))
}
|
# qubit number=5
# total number=63
import cirq
import qiskit
from qiskit import IBMQ
from qiskit.providers.ibmq import least_busy
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build the phase oracle Z_f (O_f^+-) on ``n`` qubits.

    Flips the phase of each basis state |x> with f(x) == "1": the X gates
    map |x> to |1...1>, a multi-controlled U1(pi) applies the phase, and the
    X gates are undone.

    NOTE(review): indentation was lost in extraction; restored here to the
    generator's standard layout — confirm against the original file.
    """
    # implement the oracle O_f^\pm
    # NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
    # or multi_control_Z_gate (issue #127)
    controls = QuantumRegister(n, "ofc")
    oracle = QuantumCircuit(controls, name="Zf")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)  # n-bit string of basis state i
        if f(rep) == "1":
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            # oracle.h(controls[n])
            if n >= 2:
                # multi-controlled U1(pi) == multi-controlled Z
                oracle.mcu1(pi, controls[1:], controls[0])
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the (machine-generated) Grover-style program on ``n`` qubits.

    The "# number=" tags are generator bookkeeping; the gate sequence is the
    generator's output and is documented but not altered.

    NOTE(review): indentation was lost in extraction. The repeat-loop body
    below follows the standard layout of these generated files (oracle
    followed by the diffusion layer) — confirm against the original file.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)

    # Initial layer: Hadamards plus generator-inserted extra gates.
    prog.h(input_qubit[0]) # number=3
    prog.x(input_qubit[4]) # number=53
    prog.h(input_qubit[0]) # number=60
    prog.cz(input_qubit[2],input_qubit[0]) # number=61
    prog.h(input_qubit[0]) # number=62
    prog.z(input_qubit[2]) # number=46
    prog.h(input_qubit[0]) # number=54
    prog.cz(input_qubit[2],input_qubit[0]) # number=55
    prog.h(input_qubit[0]) # number=56
    prog.h(input_qubit[1]) # number=4
    prog.rx(2.664070570244145,input_qubit[1]) # number=39
    prog.h(input_qubit[2]) # number=5
    prog.h(input_qubit[3]) # number=6
    prog.h(input_qubit[2]) # number=49
    prog.cz(input_qubit[3],input_qubit[2]) # number=50
    prog.h(input_qubit[2]) # number=51
    prog.h(input_qubit[4]) # number=21

    Zf = build_oracle(n, f)

    # Grover iteration count ~ floor(pi/4 * sqrt(2^n)).
    repeat = floor(sqrt(2 ** n) * pi / 4)
    for i in range(repeat):
        # Oracle, then (generator-mangled) diffusion operator.
        prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
        prog.h(input_qubit[0]) # number=1
        prog.h(input_qubit[3]) # number=40
        prog.y(input_qubit[4]) # number=35
        prog.h(input_qubit[1]) # number=2
        prog.h(input_qubit[2]) # number=7
        prog.h(input_qubit[3]) # number=8
        prog.h(input_qubit[0]) # number=25
        prog.cz(input_qubit[1],input_qubit[0]) # number=26
        prog.h(input_qubit[0]) # number=27
        prog.h(input_qubit[0]) # number=36
        prog.cz(input_qubit[1],input_qubit[0]) # number=37
        prog.h(input_qubit[0]) # number=38
        prog.cx(input_qubit[1],input_qubit[0]) # number=41
        prog.x(input_qubit[0]) # number=42
        prog.cx(input_qubit[1],input_qubit[0]) # number=43
        prog.cx(input_qubit[1],input_qubit[0]) # number=34
        prog.cx(input_qubit[1],input_qubit[0]) # number=24
        prog.cx(input_qubit[0],input_qubit[1]) # number=29
        prog.cx(input_qubit[2],input_qubit[3]) # number=44
        prog.x(input_qubit[1]) # number=30
        prog.cx(input_qubit[0],input_qubit[1]) # number=31
        prog.x(input_qubit[2]) # number=11
        prog.x(input_qubit[3]) # number=12
        if n>=2:
            prog.mcu1(pi,input_qubit[1:],input_qubit[0])
        prog.x(input_qubit[0]) # number=13
        prog.x(input_qubit[1]) # number=14
        prog.x(input_qubit[2]) # number=15
        prog.x(input_qubit[3]) # number=16
        prog.h(input_qubit[0]) # number=17
        prog.h(input_qubit[1]) # number=18
        prog.h(input_qubit[2]) # number=19
        prog.h(input_qubit[3]) # number=20
        prog.cx(input_qubit[1],input_qubit[0]) # number=57
        prog.z(input_qubit[1]) # number=58
        prog.cx(input_qubit[1],input_qubit[0]) # number=59
    # circuit end

    # Measure every qubit into its classical bit.
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])

    return prog
if __name__ == '__main__':
    # Oracle marks the single key "00000": f(x) == "1" iff x == key.
    key = "00000"
    f = lambda rep: str(int(rep == key))
    prog = make_circuit(5, f)

    # Run on the least-busy operational real IBMQ backend with >= 2 qubits.
    IBMQ.load_account()
    provider = IBMQ.get_provider(hub='ibm-q')
    provider.backends()
    backend = least_busy(provider.backends(filters=lambda x: x.configuration().n_qubits >= 2 and not x.configuration().simulator and x.status().operational == True))

    sample_shot = 7924
    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()

    # Transpile against a mock backend to report circuit depth.
    backend = FakeVigo()
    circuit1 = transpile(prog, backend, optimization_level=2)

    # Fix: use a context manager so the file is closed even if a print fails
    # (the original left the handle open on error).
    with open("../data/startQiskit_QC1872.csv", "w") as writefile:
        print(info, file=writefile)
        print("results end", file=writefile)
        print(circuit1.depth(), file=writefile)
        print(circuit1, file=writefile)
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from collections import OrderedDict
import numpy as np
from astropy.modeling import models
from astropy.modeling.core import Model
from astropy.utils.misc import isiterable
from asdf.tags.core.ndarray import NDArrayType
from asdf_astropy.converters.transform.core import TransformConverterBase
__all__ = ['LabelMapperConverter', 'RegionsSelectorConverter']
class LabelMapperConverter(TransformConverterBase):
    """ASDF converter for the gwcs ``LabelMapper`` family of models.

    Handles LabelMapperArray, LabelMapperDict, LabelMapperRange and
    LabelMapper, dispatching on the type of the node's ``mapper`` entry.
    """

    tags = ["tag:stsci.edu:gwcs/label_mapper-*"]
    types = ["gwcs.selector.LabelMapperArray", "gwcs.selector.LabelMapperDict",
             "gwcs.selector.LabelMapperRange", "gwcs.selector.LabelMapper"]

    def from_yaml_tree_transform(self, node, tag, ctx):
        """Reconstruct the appropriate LabelMapper* model from a YAML node.

        Dispatch: NDArray mapper -> LabelMapperArray; Model mapper ->
        LabelMapper; otherwise a {labels, models} mapping -> LabelMapperRange
        (iterable labels) or LabelMapperDict (scalar labels).
        """
        from ..selector import (LabelMapperArray, LabelMapperDict,
                                LabelMapperRange, LabelMapper)
        inputs_mapping = node.get('inputs_mapping', None)
        if inputs_mapping is not None and not isinstance(inputs_mapping, models.Mapping):
            # Bug fix: the two fragments previously concatenated to
            # "an instanceof astropy..." (missing space).
            raise TypeError("inputs_mapping must be an instance "
                            "of astropy.modeling.models.Mapping.")
        mapper = node['mapper']
        atol = node.get('atol', 1e-8)
        no_label = node.get('no_label', np.nan)
        if isinstance(mapper, NDArrayType):
            # Array masks: currently limited to 2D.
            if mapper.ndim != 2:
                raise NotImplementedError("GWCS currently only supports 2D masks.")
            return LabelMapperArray(mapper, inputs_mapping)
        elif isinstance(mapper, Model):
            inputs = node.get('inputs')
            return LabelMapper(inputs, mapper, inputs_mapping=inputs_mapping, no_label=no_label)
        else:
            inputs = node.get('inputs', None)
            if inputs is not None:
                inputs = tuple(inputs)
            labels = mapper.get('labels')
            transforms = mapper.get('models')
            if isiterable(labels[0]):
                # Iterable labels are (start, stop) ranges.
                labels = [tuple(l) for l in labels]
                dict_mapper = dict(zip(labels, transforms))
                return LabelMapperRange(inputs, dict_mapper, inputs_mapping)
            else:
                dict_mapper = dict(zip(labels, transforms))
                return LabelMapperDict(inputs, dict_mapper, inputs_mapping, atol=atol)

    def to_yaml_tree_transform(self, model, tag, ctx):
        """Serialize a LabelMapper* model into its YAML tree representation."""
        from ..selector import (LabelMapperArray, LabelMapperDict,
                                LabelMapperRange, LabelMapper)
        node = OrderedDict()
        node['no_label'] = model.no_label
        if model.inputs_mapping is not None:
            node['inputs_mapping'] = model.inputs_mapping
        if isinstance(model, LabelMapperArray):
            node['mapper'] = model.mapper
        elif isinstance(model, LabelMapper):
            node['mapper'] = model.mapper
            node['inputs'] = list(model.inputs)
        elif isinstance(model, (LabelMapperDict, LabelMapperRange)):
            if hasattr(model, 'atol'):
                node['atol'] = model.atol
            mapper = OrderedDict()
            labels = list(model.mapper)
            transforms = []
            for k in labels:
                transforms.append(model.mapper[k])
            if isiterable(labels[0]):
                # Range keys are tuples; store them as lists for YAML.
                labels = [list(l) for l in labels]
            mapper['labels'] = labels
            mapper['models'] = transforms
            node['mapper'] = mapper
            node['inputs'] = list(model.inputs)
        else:
            raise TypeError("Unrecognized type of LabelMapper - {0}".format(model))
        return node
class RegionsSelectorConverter(TransformConverterBase):
    """ASDF converter for ``gwcs.selector.RegionsSelector``."""

    tags = ["tag:stsci.edu:gwcs/regions_selector-*"]
    types = ["gwcs.selector.RegionsSelector"]

    def from_yaml_tree_transform(self, node, tag, ctx):
        """Rebuild a RegionsSelector from its YAML tree node."""
        from ..selector import RegionsSelector

        sel_node = node['selector']
        # The on-disk form stores parallel label/transform lists.
        selector = dict(zip(sel_node['labels'], sel_node['transforms']))
        return RegionsSelector(node['inputs'], node['outputs'], selector,
                               node['label_mapper'],
                               node['undefined_transform_value'])

    def to_yaml_tree_transform(self, model, tag, ctx):
        """Serialize a RegionsSelector into its YAML tree representation."""
        labels = list(model.selector)
        selector = OrderedDict()
        selector['labels'] = labels
        selector['transforms'] = [model.selector[label] for label in labels]

        node = OrderedDict()
        node['inputs'] = list(model.inputs)
        node['outputs'] = list(model.outputs)
        node['selector'] = selector
        node['label_mapper'] = model.label_mapper
        node['undefined_transform_value'] = model.undefined_transform_value
        return node
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = undefined;
// Babel helper: shallow-merge own enumerable properties of each source
// object into `target` (native Object.assign when available).
var _extends = Object.assign || function (target) {
  for (var argIndex = 1; argIndex < arguments.length; argIndex++) {
    var src = arguments[argIndex];
    for (var prop in src) {
      if (Object.prototype.hasOwnProperty.call(src, prop)) {
        target[prop] = src[prop];
      }
    }
  }
  return target;
};
var _class, _temp, _initialiseProps;
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _gregorianCalendar = require('gregorian-calendar');
var _gregorianCalendar2 = _interopRequireDefault(_gregorianCalendar);
var _RangeCalendar = require('rc-calendar/lib/RangeCalendar');
var _RangeCalendar2 = _interopRequireDefault(_RangeCalendar);
var _Picker = require('rc-calendar/lib/Picker');
var _Picker2 = _interopRequireDefault(_Picker);
var _classnames = require('classnames');
var _classnames2 = _interopRequireDefault(_classnames);
var _icon = require('../icon');
var _icon2 = _interopRequireDefault(_icon);
// Babel helper: expose a CommonJS export as an ES-module-style namespace
// with a "default" property (no-op for modules already marked __esModule).
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { "default": obj };
}
// Babel helper: copy every configurable own property of `defaults` onto
// `obj` unless `obj` already defines it (used as a setPrototypeOf fallback).
function _defaults(obj, defaults) {
  var names = Object.getOwnPropertyNames(defaults);
  for (var idx = 0; idx < names.length; idx++) {
    var name = names[idx];
    var descriptor = Object.getOwnPropertyDescriptor(defaults, name);
    if (descriptor && descriptor.configurable && obj[name] === undefined) {
      Object.defineProperty(obj, name, descriptor);
    }
  }
  return obj;
}
// Babel helper: guard against invoking a transpiled class without `new`.
function _classCallCheck(instance, Constructor) {
  if (instance instanceof Constructor) {
    return;
  }
  throw new TypeError("Cannot call a class as a function");
}
// Babel helper: honor a constructor's explicit object/function return value,
// otherwise fall back to `this`; throws if super() was never called.
function _possibleConstructorReturn(self, call) {
  if (!self) {
    throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
  }
  var callIsObjectLike = call && (typeof call === "object" || typeof call === "function");
  return callIsObjectLike ? call : self;
}
// Babel helper: wire up prototype-chain inheritance (subClass extends superClass).
// Accepts a null superClass; falls back to _defaults where setPrototypeOf is missing.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : _defaults(subClass, superClass); }
// Ant Design RangePicker (Babel-transpiled ES5 output). Acts as a controlled
// component when a `value` prop is present, otherwise tracks the selected
// range in local state.
var RangePicker = (_temp = _class = function (_React$Component) {
  _inherits(RangePicker, _React$Component);

  function RangePicker(props) {
    _classCallCheck(this, RangePicker);

    var _this = _possibleConstructorReturn(this, _React$Component.call(this, props));

    _initialiseProps.call(_this);

    var _this$props = _this.props,
        value = _this$props.value,
        defaultValue = _this$props.defaultValue,
        parseDateFromValue = _this$props.parseDateFromValue;
    // Controlled `value` wins over `defaultValue` for each end of the range.
    var start = value && value[0] || defaultValue[0];
    var end = value && value[1] || defaultValue[1];
    _this.state = {
      value: [parseDateFromValue(start), parseDateFromValue(end)]
    };
    return _this;
  }

  // Keep internal state in sync when used as a controlled component.
  RangePicker.prototype.componentWillReceiveProps = function componentWillReceiveProps(nextProps) {
    if ('value' in nextProps) {
      var value = nextProps.value || [];
      var start = nextProps.parseDateFromValue(value[0]);
      var end = nextProps.parseDateFromValue(value[1]);
      this.setState({
        value: [start, end]
      });
    }
  };

  RangePicker.prototype.render = function render() {
    var props = this.props;
    var locale = props.locale;
    // The next two lines provide locale information to a picker without an
    // initial value; otherwise the calendar would start the week on Sunday.
    var defaultCalendarValue = new _gregorianCalendar2["default"](locale);
    defaultCalendarValue.setTime(Date.now());
    var _props = this.props,
        disabledDate = _props.disabledDate,
        showTime = _props.showTime,
        getCalendarContainer = _props.getCalendarContainer,
        transitionName = _props.transitionName,
        disabled = _props.disabled,
        popupStyle = _props.popupStyle,
        align = _props.align,
        style = _props.style,
        onOk = _props.onOk;
    var state = this.state;
    var calendarClassName = (0, _classnames2["default"])({
      'ant-calendar-time': showTime
    });
    // When a time picker is shown, onChange should only fire on OK click.
    var pickerChangeHandler = {
      onChange: this.handleChange
    };
    var calendarHandler = {
      onOk: this.handleChange
    };
    if (props.timePicker) {
      pickerChangeHandler = {};
    } else {
      calendarHandler = {};
    }
    var startPlaceholder = 'startPlaceholder' in this.props ? props.startPlaceholder : locale.lang.rangePlaceholder[0];
    var endPlaceholder = 'endPlaceholder' in props ? props.endPlaceholder : locale.lang.rangePlaceholder[1];
    var calendar = _react2["default"].createElement(_RangeCalendar2["default"], _extends({
      prefixCls: 'ant-calendar',
      className: calendarClassName,
      timePicker: props.timePicker,
      disabledDate: disabledDate,
      dateInputPlaceholder: [startPlaceholder, endPlaceholder],
      locale: locale.lang,
      onOk: onOk,
      defaultValue: [defaultCalendarValue, defaultCalendarValue]
    }, calendarHandler));
    // Clear icon is only rendered when enabled and at least one end is set.
    var clearIcon = !props.disabled && state.value && (state.value[0] || state.value[1]) ? _react2["default"].createElement(_icon2["default"], {
      type: 'cross-circle',
      className: 'ant-calendar-picker-clear',
      onClick: this.clearSelection
    }) : null;
    return _react2["default"].createElement(
      'span',
      { className: props.pickerClass, style: style },
      _react2["default"].createElement(
        _Picker2["default"],
        _extends({
          formatter: props.getFormatter(),
          transitionName: transitionName,
          disabled: disabled,
          calendar: calendar,
          value: state.value,
          prefixCls: 'ant-calendar-picker-container',
          style: popupStyle,
          align: align,
          getCalendarContainer: getCalendarContainer,
          onOpen: props.toggleOpen,
          onClose: props.toggleOpen
        }, pickerChangeHandler),
        function (_ref) {
          var value = _ref.value;
          var start = value[0];
          var end = value[1];
          return _react2["default"].createElement(
            'span',
            { className: props.pickerInputClass, disabled: disabled },
            _react2["default"].createElement('input', {
              disabled: disabled,
              readOnly: true,
              value: start ? props.getFormatter().format(start) : '',
              placeholder: startPlaceholder,
              className: 'ant-calendar-range-picker-input'
            }),
            _react2["default"].createElement(
              'span',
              { className: 'ant-calendar-range-picker-separator' },
              ' ~ '
            ),
            _react2["default"].createElement('input', {
              disabled: disabled,
              readOnly: true,
              value: end ? props.getFormatter().format(end) : '',
              placeholder: endPlaceholder,
              className: 'ant-calendar-range-picker-input'
            }),
            clearIcon,
            _react2["default"].createElement('span', { className: 'ant-calendar-picker-icon' })
          );
        }
      )
    );
  };

  return RangePicker;
}(_react2["default"].Component), _class.defaultProps = {
  defaultValue: []
}, _initialiseProps = function _initialiseProps() {
  var _this2 = this;

  // Reset both ends of the range and notify listeners with an empty range.
  this.clearSelection = function (e) {
    e.preventDefault();
    e.stopPropagation();
    _this2.setState({ value: [] });
    _this2.handleChange([]);
  };

  this.handleChange = function (value) {
    var props = _this2.props;
    if (!('value' in props)) {
      // Uncontrolled mode: track the selection locally.
      _this2.setState({ value: value });
    }
    var startDate = value[0] ? new Date(value[0].getTime()) : null;
    var endDate = value[1] ? new Date(value[1].getTime()) : null;
    var startDateString = value[0] ? props.getFormatter().format(value[0]) : '';
    var endDateString = value[1] ? props.getFormatter().format(value[1]) : '';
    props.onChange([startDate, endDate], [startDateString, endDateString]);
  };
}, _temp);
exports["default"] = RangePicker;
module.exports = exports['default'];
|
from .user import UserSerializer
from .branch import BranchSerializer
from .branch_schedule import BranchScheduleSerializer
__all__ = ['UserSerializer', 'BranchSerializer', 'BranchScheduleSerializer']
|
// Doxygen-generated search index fragment (do not edit by hand): each entry
// is [anchor-id, [display-name, [target-url, in-page-flag, tooltip], ...]].
var searchData=
[
  ['info_5farch_29',['info_arch',['../_c_make_c_compiler_id_8c.html#a59647e99d304ed33b15cb284c27ed391',1,'info_arch(): CMakeCCompilerId.c'],['../_c_make_c_x_x_compiler_id_8cpp.html#a59647e99d304ed33b15cb284c27ed391',1,'info_arch(): CMakeCXXCompilerId.cpp']]],
  ['info_5fcompiler_30',['info_compiler',['../_c_make_c_compiler_id_8c.html#a4b0efeb7a5d59313986b3a0390f050f6',1,'info_compiler(): CMakeCCompilerId.c'],['../_c_make_c_x_x_compiler_id_8cpp.html#a4b0efeb7a5d59313986b3a0390f050f6',1,'info_compiler(): CMakeCXXCompilerId.cpp']]],
  ['info_5flanguage_5fdialect_5fdefault_31',['info_language_dialect_default',['../_c_make_c_compiler_id_8c.html#a1ce162bad2fe6966ac8b33cc19e120b8',1,'info_language_dialect_default(): CMakeCCompilerId.c'],['../_c_make_c_x_x_compiler_id_8cpp.html#a1ce162bad2fe6966ac8b33cc19e120b8',1,'info_language_dialect_default(): CMakeCXXCompilerId.cpp']]],
  ['info_5fplatform_32',['info_platform',['../_c_make_c_compiler_id_8c.html#a2321403dee54ee23f0c2fa849c60f7d4',1,'info_platform(): CMakeCCompilerId.c'],['../_c_make_c_x_x_compiler_id_8cpp.html#a2321403dee54ee23f0c2fa849c60f7d4',1,'info_platform(): CMakeCXXCompilerId.cpp']]],
  ['iperiodicjob_33',['IPeriodicJob',['../class_i_periodic_job.html',1,'']]],
  ['iperiodicjob_2eh_34',['IPeriodicJob.h',['../_i_periodic_job_8h.html',1,'']]],
  ['iperiodicjobcontroller_35',['IPeriodicJobController',['../class_i_periodic_job_controller.html',1,'']]],
  ['iperiodicjobcontroller_2eh_36',['IPeriodicJobController.h',['../_i_periodic_job_controller_8h.html',1,'']]]
];
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Mar 11 2021 20:53:35).
//
// Copyright (C) 1997-2019 Steve Nygard.
//
#import <Flexo/FFShareDestination.h>
// Share destination shown while share state is loading.
// NOTE(review): this is a class-dump'd declaration — the semantics below are
// inferred from method names only; confirm against the implementation.
@interface FFShareLoadingDestination : FFShareDestination
{
}

// Shared singleton accessor (see also -_initSingleton).
+ (id)sharedShareLoadingDestination;
- (BOOL)isEditable;
- (void)dealloc;
- (id)iconTitle;
- (id)action;
// NSCopying / NSCoding support inherited from the destination protocol.
- (id)copyWithZone:(struct _NSZone *)arg1;
- (void)encodeWithCoder:(id)arg1;
- (id)initWithCoder:(id)arg1;
- (id)initWithName:(id)arg1;
- (id)init;
- (id)_initSingleton;
@end
|
/*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
// Dialog definition for inserting or editing a checkbox <input> element.
CKEDITOR.dialog.add( 'checkbox', function( editor )
{
	return {
		title : editor.lang.checkboxAndRadio.checkboxTitle,
		minWidth : 350,
		minHeight : 140,

		// Capture the currently selected checkbox (if any) so onOk knows
		// whether to update it in place or insert a new element.
		onShow : function()
		{
			delete this.checkbox;

			var element = this.getParentEditor().getSelection().getSelectedElement();

			if ( element && element.getAttribute( 'type' ) == 'checkbox' )
			{
				this.checkbox = element;
				this.setupContent( element );
			}
		},

		// Insert a new checkbox when none was selected, then commit the
		// dialog fields onto the element.
		onOk : function()
		{
			var editor,
				element = this.checkbox,
				isInsertMode = !element;

			if ( isInsertMode )
			{
				editor = this.getParentEditor();
				element = editor.document.createElement( 'input' );
				element.setAttribute( 'type', 'checkbox' );
				editor.insertElement( element );
			}
			this.commitContent( { element : element } );
		},
		contents : [
			{
				id : 'info',
				label : editor.lang.checkboxAndRadio.checkboxTitle,
				title : editor.lang.checkboxAndRadio.checkboxTitle,
				startupFocus : 'txtName',
				elements : [
					{
						id : 'txtName',
						type : 'text',
						label : editor.lang.common.name,
						'default' : '',
						accessKey : 'N',
						setup : function( element )
						{
							this.setValue(
									element.data( 'cke-saved-name' ) ||
									element.getAttribute( 'name' ) ||
									'' );
						},
						commit : function( data )
						{
							var element = data.element;

							// IE failed to update 'name' property on input elements, protect it now.
							if ( this.getValue() )
								element.data( 'cke-saved-name', this.getValue() );
							else
							{
								element.data( 'cke-saved-name', false );
								element.removeAttribute( 'name' );
							}
						}
					},
					{
						id : 'txtValue',
						type : 'text',
						label : editor.lang.checkboxAndRadio.value,
						'default' : '',
						accessKey : 'V',
						setup : function( element )
						{
							var value = element.getAttribute( 'value' );
							// IE Return 'on' as default attr value.
							this.setValue( CKEDITOR.env.ie && value == 'on' ? '' : value );
						},
						commit : function( data )
						{
							var element = data.element,
								value = this.getValue();

							if ( value && !( CKEDITOR.env.ie && value == 'on' ) )
								element.setAttribute( 'value', value );
							else
							{
								if ( CKEDITOR.env.ie )
								{
									// Remove attribute 'value' of checkbox (#4721).
									var checkbox = new CKEDITOR.dom.element( 'input', element.getDocument() );
									element.copyAttributes( checkbox, { value: 1 } );
									checkbox.replace( element );
									editor.getSelection().selectElement( checkbox );
									data.element = checkbox;
								}
								else
									element.removeAttribute( 'value' );
							}
						}
					},
					{
						id : 'cmbSelected',
						type : 'checkbox',
						label : editor.lang.checkboxAndRadio.selected,
						'default' : '',
						accessKey : 'S',
						value : "checked",
						setup : function( element )
						{
							this.setValue( element.getAttribute( 'checked' ) );
						},
						// IE cannot toggle 'checked' on an existing element, so the
						// element is recreated there; other browsers simply set or
						// remove the attribute.
						commit : function( data )
						{
							var element = data.element;

							if ( CKEDITOR.env.ie )
							{
								var isElementChecked = !!element.getAttribute( 'checked' ),
									isChecked = !!this.getValue();

								if ( isElementChecked != isChecked )
								{
									var replace = CKEDITOR.dom.element.createFromHtml( '<input type="checkbox"'
										+ ( isChecked ? ' checked="checked"' : '' )
										+ '/>', editor.document );

									element.copyAttributes( replace, { type : 1, checked : 1 } );
									replace.replace( element );
									editor.getSelection().selectElement( replace );
									data.element = replace;
								}
							}
							else
							{
								var value = this.getValue();
								if ( value )
									element.setAttribute( 'checked', 'checked' );
								else
									element.removeAttribute( 'checked' );
							}
						}
					}
				]
			}
		]
	};
});
|
# flake8: noqa
from typing import Dict, List, Any, Optional, cast, TYPE_CHECKING
import pystac
from pystac.serialization.identify import STACVersionID, identify_stac_object
from pystac.validation.schema_uri_map import OldExtensionSchemaUriMap
from pystac.utils import make_absolute_href
if TYPE_CHECKING:
from pystac.stac_object import STACObject as STACObject_Type
from pystac.stac_object import STACObjectType as STACObjectType_Type
# Import after above class definition
from pystac.validation.stac_validator import STACValidator, JsonSchemaSTACValidator
def validate(stac_object: "STACObject_Type") -> List[Any]:
    """Validates a :class:`~pystac.STACObject`.

    Serializes the object to a dict and delegates to
    :func:`validate_dict`, passing along the object's own type, the
    current STAC version, its extensions and its self HREF.

    Args:
        stac_object : The stac object to validate.

    Returns:
        List[Object]: List of return values from the validation calls for the
           core object and any extensions. Element type is specific to the
           STACValidator implementation.

    Raises:
        STACValidationError
    """
    serialized = stac_object.to_dict()
    return validate_dict(
        stac_dict=serialized,
        stac_object_type=stac_object.STAC_OBJECT_TYPE,
        stac_version=pystac.get_stac_version(),
        extensions=stac_object.stac_extensions,
        href=stac_object.get_self_href(),
    )
def validate_dict(
    stac_dict: Dict[str, Any],
    stac_object_type: Optional["STACObjectType_Type"] = None,
    stac_version: Optional[str] = None,
    extensions: Optional[List[str]] = None,
    href: Optional[str] = None,
) -> List[Any]:
    """Validate a stac object serialized as JSON into a dict.

    This method delegates to the call to :meth:`pystac.validation.STACValidator.validate`
    for the STACValidator registered via :meth:`~pystac.validation.set_validator` or
    :class:`~pystac.validation.JsonSchemaSTACValidator` by default.

    Args:
        stac_dict : Dictionary that is the STAC json of the object.
        stac_object_type : The stac object type of the object encoded in stac_dict.
            One of :class:`~pystac.STACObjectType`. If not supplied, this will use
            PySTAC's identification logic to identify the object type.
        stac_version : The version of STAC to validate the object against. If not supplied,
            this will use PySTAC's identification logic to identify the stac version
        extensions : Extension IDs for this stac object. If not supplied,
            PySTAC's identification logic to identify the extensions.
        href : Optional HREF of the STAC object being validated.

    Returns:
        List[Object]: List of return values from the validation calls for the
            core object and any extensions. Element type is specific to the
            STACValidator implementation.

    Raises:
        STACValidationError
    """
    # Identification runs at most once, and only if some piece of information
    # (object type, version or extensions) was not supplied by the caller.
    info = None
    if stac_object_type is None:
        info = identify_stac_object(stac_dict)
        stac_object_type = info.object_type
    if stac_version is None:
        if info is None:
            info = identify_stac_object(stac_dict)
        stac_version = str(info.version_range.latest_valid_version())
    if extensions is None:
        if info is None:
            info = identify_stac_object(stac_dict)
        extensions = list(info.extensions)

    stac_version_id = STACVersionID(stac_version)

    # If the version is before 1.0.0-rc.1, substitute extension short IDs for
    # their schemas.
    if stac_version_id < "1.0.0-rc.1":

        def _get_uri(ext: str) -> Optional[str]:
            # Extensions without a mapped schema URI are dropped below.
            return OldExtensionSchemaUriMap.get_extension_schema_uri(
                ext,
                stac_object_type,  # type:ignore
                stac_version_id,
            )

        extensions = [uri for uri in map(_get_uri, extensions) if uri is not None]

    return RegisteredValidator.get_validator().validate(
        stac_dict, stac_object_type, stac_version, extensions, href
    )
def validate_all(
    stac_dict: Dict[str, Any], href: str, stac_io: Optional[pystac.StacIO] = None
) -> None:
    """Validate STAC JSON and all contained catalogs, collections and items.

    If this stac_dict represents a catalog or collection, this method will
    recursively be called for each child link and all contained items.

    Args:
        stac_dict : Dictionary that is the STAC json of the object.
        href : HREF of the STAC object being validated. Used for error
            reporting and resolving relative links.
        stac_io: Optional StacIO instance to use for reading hrefs. If None,
            the StacIO.default() instance is used.

    Raises:
        STACValidationError: This will raise a STACValidationError if this or any contained
            catalog, collection or item has a validation error.
    """
    if stac_io is None:
        stac_io = pystac.StacIO.default()

    info = identify_stac_object(stac_dict)

    # Validate this object
    validate_dict(
        stac_dict,
        stac_object_type=info.object_type,
        stac_version=str(info.version_range.latest_valid_version()),
        extensions=list(info.extensions),
        href=href,
    )

    if info.object_type != pystac.STACObjectType.ITEM:
        if "links" in stac_dict:
            # Account for 0.6 links
            if isinstance(stac_dict["links"], dict):
                links: List[Dict[str, Any]] = list(stac_dict["links"].values())
            else:
                links = cast(List[Dict[str, Any]], stac_dict.get("links"))
            for link in links:
                rel = link.get("rel")
                if rel in [pystac.RelType.ITEM, pystac.RelType.CHILD]:
                    link_href = make_absolute_href(
                        cast(str, link.get("href")), start_href=href
                    )
                    if link_href is not None:
                        d = stac_io.read_json(link_href)
                        # Bug fix: propagate the caller-supplied stac_io so the
                        # recursion does not silently fall back to the default
                        # StacIO for child catalogs and items.
                        validate_all(d, link_href, stac_io)
class RegisteredValidator:
    """Holds the process-wide :class:`STACValidator` instance (lazy default)."""

    # The currently registered validator; created on first use.
    _validator: Optional[STACValidator] = None

    @classmethod
    def get_validator(cls) -> STACValidator:
        """Return the registered validator, lazily building the default.

        The default is :class:`JsonSchemaSTACValidator`, which requires the
        optional ``jsonschema`` dependency; a helpful error is raised when it
        is missing.
        """
        if cls._validator is None:
            try:
                import jsonschema  # type:ignore
            except ImportError:
                raise Exception(
                    'Cannot validate with default validator because package "jsonschema" '
                    "is not installed. Install pystac with the validation optional requirements "
                    "(e.g. pip install pystac[validation]) to install jsonschema"
                )
            cls._validator = JsonSchemaSTACValidator()
        return cls._validator

    @classmethod
    def set_validator(cls, validator: STACValidator) -> None:
        """Register ``validator``; it must be a STACValidator subclass."""
        if not issubclass(type(validator), STACValidator):
            raise Exception("Validator must be a subclass of {}".format(STACValidator))
        cls._validator = validator
def set_validator(validator: STACValidator) -> None:
    """Sets the STACValidator to use in PySTAC.

    Thin module-level convenience wrapper around
    :meth:`RegisteredValidator.set_validator`.

    Args:
        validator : The STACValidator implementation to use for validation.
    """
    RegisteredValidator.set_validator(validator)
|
/**
* @license Angular v7.2.13
* (c) 2010-2019 Google LLC. https://angular.io/
* License: MIT
*/
import { NoopAnimationPlayer, ɵAnimationGroupPlayer, ɵPRE_STYLE, AUTO_STYLE, sequence, style } from '@angular/animations';
import { Injectable } from '@angular/core';
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * Whether the code is executing in a browser-like environment, i.e. both
 * `window` and `window.document` globals exist.
 * @return {?}
 */
function isBrowser() {
    if (typeof window === 'undefined') {
        return false;
    }
    return typeof window.document !== 'undefined';
}
/**
 * Whether a Node-style `process` global is present.
 * @return {?}
 */
function isNode() {
    return typeof process !== 'undefined';
}
/**
 * Collapse a list of animation players into a single player: a no-op player
 * for an empty list, the lone player for a single entry, otherwise a group
 * player wrapping them all.
 * @param {?} players
 * @return {?}
 */
function optimizeGroupPlayer(players) {
    if (players.length === 0) {
        return new NoopAnimationPlayer();
    }
    if (players.length === 1) {
        return players[0];
    }
    return new ɵAnimationGroupPlayer(players);
}
/**
 * Normalize raw keyframes: resolve ɵPRE_STYLE / AUTO_STYLE placeholders from
 * the supplied pre/post style maps, normalize property names and values via
 * the normalizer, and merge keyframes that share the same offset. Throws a
 * single aggregated Error if the normalizer reported any problems.
 * @param {?} driver
 * @param {?} normalizer
 * @param {?} element
 * @param {?} keyframes
 * @param {?=} preStyles
 * @param {?=} postStyles
 * @return {?}
 */
function normalizeKeyframes(driver, normalizer, element, keyframes, preStyles = {}, postStyles = {}) {
    /** @type {?} */
    const errors = [];
    /** @type {?} */
    const normalizedKeyframes = [];
    /** @type {?} */
    let previousOffset = -1;
    /** @type {?} */
    let previousKeyframe = null;
    keyframes.forEach(kf => {
        /** @type {?} */
        const offset = (/** @type {?} */ (kf['offset']));
        // Consecutive keyframes at the same offset are merged into one frame.
        /** @type {?} */
        const isSameOffset = offset == previousOffset;
        /** @type {?} */
        const normalizedKeyframe = (isSameOffset && previousKeyframe) || {};
        Object.keys(kf).forEach(prop => {
            /** @type {?} */
            let normalizedProp = prop;
            /** @type {?} */
            let normalizedValue = kf[prop];
            if (prop !== 'offset') {
                normalizedProp = normalizer.normalizePropertyName(normalizedProp, errors);
                switch (normalizedValue) {
                    case ɵPRE_STYLE:
                        // Placeholder resolved from the captured pre-styles.
                        normalizedValue = preStyles[prop];
                        break;
                    case AUTO_STYLE:
                        // Placeholder resolved from the captured post-styles.
                        normalizedValue = postStyles[prop];
                        break;
                    default:
                        normalizedValue =
                            normalizer.normalizeStyleValue(prop, normalizedProp, normalizedValue, errors);
                        break;
                }
            }
            normalizedKeyframe[normalizedProp] = normalizedValue;
        });
        if (!isSameOffset) {
            normalizedKeyframes.push(normalizedKeyframe);
        }
        previousKeyframe = normalizedKeyframe;
        previousOffset = offset;
    });
    if (errors.length) {
        /** @type {?} */
        const LINE_START = '\n - ';
        throw new Error(`Unable to animate due to the following errors:${LINE_START}${errors.join(LINE_START)}`);
    }
    return normalizedKeyframes;
}
/**
 * Register `callback` on the player for the given lifecycle phase
 * ('start' | 'done' | 'destroy'); other event names are ignored. The
 * callback receives a copy of `event` stamped with the phase, or a falsy
 * value when no event was supplied.
 * @param {?} player
 * @param {?} eventName
 * @param {?} event
 * @param {?} callback
 * @return {?}
 */
function listenOnPlayer(player, eventName, event, callback) {
    const emit = (phase) => callback(event && copyAnimationEvent(event, phase, player));
    if (eventName === 'start') {
        player.onStart(() => emit('start'));
    }
    else if (eventName === 'done') {
        player.onDone(() => emit('done'));
    }
    else if (eventName === 'destroy') {
        player.onDestroy(() => emit('destroy'));
    }
}
/**
 * Clone an animation event, overriding its phase name with `phaseName` (when
 * truthy), taking totalTime/disabled from the player, and carrying over any
 * private '_data' payload.
 * @param {?} e
 * @param {?} phaseName
 * @param {?} player
 * @return {?}
 */
function copyAnimationEvent(e, phaseName, player) {
    const playerTotalTime = player.totalTime;
    const isDisabled = ((/** @type {?} */ (player))).disabled ? true : false;
    // Loose == on purpose: fall back to the source event's totalTime when the
    // player's is null or undefined.
    const resolvedTotalTime = playerTotalTime == undefined ? e.totalTime : playerTotalTime;
    const copied = makeAnimationEvent(e.element, e.triggerName, e.fromState, e.toState, phaseName || e.phaseName, resolvedTotalTime, isDisabled);
    const privateData = ((/** @type {?} */ (e)))['_data'];
    if (privateData != null) {
        ((/** @type {?} */ (copied)))['_data'] = privateData;
    }
    return copied;
}
/**
 * Builds a plain AnimationEvent object.
 * @param {?} element element the animation ran on
 * @param {?} triggerName name of the owning trigger
 * @param {?} fromState state the transition left
 * @param {?} toState state the transition entered
 * @param {?=} phaseName lifecycle phase ('' when not supplied)
 * @param {?=} totalTime total animation time in ms (0 when not supplied)
 * @param {?=} disabled truthiness is normalized to a strict boolean
 * @return {?}
 */
function makeAnimationEvent(element, triggerName, fromState, toState, phaseName = '', totalTime = 0, disabled) {
    return {
        element,
        triggerName,
        fromState,
        toState,
        phaseName,
        totalTime,
        disabled: disabled ? true : false,
    };
}
/**
 * Reads `key` from `map` (a Map or a plain object); when the stored value is
 * missing or falsy, stores and returns `defaultValue` instead.
 * @param {?} map a `Map` instance or a plain string-keyed object
 * @param {?} key lookup key
 * @param {?} defaultValue value stored when no truthy entry exists
 * @return {?} the existing truthy value or the newly stored default
 */
function getOrSetAsInMap(map, key, defaultValue) {
    if (map instanceof Map) {
        if (!map.get(key)) {
            map.set(key, defaultValue);
        }
        return map.get(key);
    }
    if (!map[key]) {
        map[key] = defaultValue;
    }
    return map[key];
}
/**
 * Splits a timeline command of the form `@id:action` into its id and action
 * parts. Only the first `:` acts as the separator, so the action part may
 * itself contain colons.
 * @param {?} command command string, e.g. `'@myAnimation:pause'`
 * @return {?} a `[id, action]` tuple
 */
function parseTimelineCommand(command) {
    /** @type {?} */
    const separatorPos = command.indexOf(':');
    // skip the leading `@` marker
    /** @type {?} */
    const id = command.substring(1, separatorPos);
    // `substring` replaces the deprecated Annex-B `String.prototype.substr`
    /** @type {?} */
    const action = command.substring(separatorPos + 1);
    return [id, action];
}
// DOM helper shims. They start out as no-op fallbacks (used on platforms with
// no Element implementation) and are upgraded below when a DOM — a real
// browser DOM or domino on platform-server — is available.
/** @type {?} */
let _contains = (elm1, elm2) => false;
/** @type {?} */
let _matches = (element, selector) => false;
/** @type {?} */
let _query = (element, selector, multi) => {
    return [];
};
// Define utility methods for browsers and platform-server(domino) where Element
// and utility methods exist.
/** @type {?} */
const _isNode = isNode();
if (_isNode || typeof Element !== 'undefined') {
    // this is well supported in all browsers
    _contains = (elm1, elm2) => { return (/** @type {?} */ (elm1.contains(elm2))); };
    // `Element.prototype.matches` is assumed available under domino (_isNode);
    // otherwise older browsers fall back to their vendor-prefixed variants.
    if (_isNode || Element.prototype.matches) {
        _matches = (element, selector) => element.matches(selector);
    }
    else {
        /** @type {?} */
        const proto = (/** @type {?} */ (Element.prototype));
        /** @type {?} */
        const fn = proto.matchesSelector || proto.mozMatchesSelector || proto.msMatchesSelector ||
            proto.oMatchesSelector || proto.webkitMatchesSelector;
        if (fn) {
            _matches = (element, selector) => fn.apply(element, [selector]);
        }
    }
    // querySelector/querySelectorAll based lookup; `multi` selects between
    // "all matches" and "first match only" (returned as a 0/1-element array).
    _query = (element, selector, multi) => {
        /** @type {?} */
        let results = [];
        if (multi) {
            results.push(...element.querySelectorAll(selector));
        }
        else {
            /** @type {?} */
            const elm = element.querySelector(selector);
            if (elm) {
                results.push(elm);
            }
        }
        return results;
    };
}
/**
 * Detects whether a CSS property name carries a `webkit`/`Webkit` vendor
 * prefix (the first character is ignored so both casings match).
 * @param {?} prop CSS property name
 * @return {?} true when the property is webkit-prefixed
 */
function containsVendorPrefix(prop) {
    // Webkit is the only real popular vendor prefix nowadays
    // cc: http://shouldiprefix.com/
    /** @type {?} */
    const tail = prop.substring(1, 6);
    return tail === 'ebkit'; // webkit or Webkit
}
// Lazily-resolved document body (or `{}` when no DOM exists) plus a flag for
// whether the environment exposes webkit-prefixed style properties.
/** @type {?} */
let _CACHED_BODY = null;
/** @type {?} */
let _IS_WEBKIT = false;
/**
 * Checks whether `prop` can be animated in the current environment by probing
 * the body element's style object. Vendor-prefixed properties and DOM-less
 * environments are optimistically reported as valid.
 * @param {?} prop CSS property name (camelCase or dash-case)
 * @return {?} true when the property looks animatable
 */
function validateStyleProperty(prop) {
    if (!_CACHED_BODY) {
        _CACHED_BODY = getBodyNode() || {};
        _IS_WEBKIT = _CACHED_BODY.style ? ('WebkitAppearance' in _CACHED_BODY.style) : false;
    }
    // without a style object (server w/o domino) or for prefixed props,
    // assume validity
    if (!_CACHED_BODY.style || containsVendorPrefix(prop)) {
        return true;
    }
    /** @type {?} */
    let supported = prop in _CACHED_BODY.style;
    if (!supported && _IS_WEBKIT) {
        // retry with the capitalized Webkit-prefixed form
        /** @type {?} */
        const prefixed = 'Webkit' + prop.charAt(0).toUpperCase() + prop.slice(1);
        supported = prefixed in _CACHED_BODY.style;
    }
    return supported;
}
/**
 * Returns `document.body` when a DOM document is present, otherwise null.
 * @return {?}
 */
function getBodyNode() {
    return typeof document != 'undefined' ? document.body : null;
}
// Public aliases for the environment-specific DOM helpers selected above.
/** @type {?} */
const matchesElement = _matches;
/** @type {?} */
const containsElement = _contains;
/** @type {?} */
const invokeQuery = _query;
/**
 * Returns a copy of `object` whose camelCase keys have a `-` inserted between
 * each lower/upper-case letter pair. Note: the original casing is preserved
 * (e.g. `backgroundColor` becomes `background-Color`); values are untouched.
 * @param {?} object style-like map keyed by property name
 * @return {?} new object with hyphenated keys
 */
function hypenatePropsObject(object) {
    /** @type {?} */
    const hyphenated = {};
    for (const prop of Object.keys(object)) {
        hyphenated[prop.replace(/([a-z])([A-Z])/g, '$1-$2')] = object[prop];
    }
    return hyphenated;
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* \@publicApi
*/
class NoopAnimationDriver {
    /**
     * @param {?} prop CSS property name
     * @return {?} whether `prop` is an animatable CSS property
     */
    validateStyleProperty(prop) {
        return validateStyleProperty(prop);
    }
    /**
     * @param {?} element
     * @param {?} selector
     * @return {?} whether `element` matches `selector`
     */
    matchesElement(element, selector) {
        return matchesElement(element, selector);
    }
    /**
     * @param {?} elm1
     * @param {?} elm2
     * @return {?} whether `elm1` contains `elm2`
     */
    containsElement(elm1, elm2) {
        return containsElement(elm1, elm2);
    }
    /**
     * @param {?} element root element of the query
     * @param {?} selector CSS selector
     * @param {?} multi when true, all matches; otherwise at most one
     * @return {?} array of matching elements
     */
    query(element, selector, multi) {
        return invokeQuery(element, selector, multi);
    }
    /**
     * No style is actually computed by this driver.
     * @param {?} element
     * @param {?} prop
     * @param {?=} defaultValue
     * @return {?} the provided default value, or '' when none was given
     */
    computeStyle(element, prop, defaultValue) {
        return defaultValue || '';
    }
    /**
     * Produces a player that performs no DOM work but still reports timing.
     * @param {?} element
     * @param {?} keyframes
     * @param {?} duration
     * @param {?} delay
     * @param {?} easing
     * @param {?=} previousPlayers
     * @param {?=} scrubberAccessRequested
     * @return {?} a noop player spanning `duration` + `delay`
     */
    animate(element, keyframes, duration, delay, easing, previousPlayers = [], scrubberAccessRequested) {
        return new NoopAnimationPlayer(duration, delay);
    }
}
NoopAnimationDriver.decorators = [
    { type: Injectable }
];
/**
 * Abstract base contract for animation drivers; concrete drivers are provided
 * by the rendering layer.
 * \@publicApi
 * @abstract
 */
class AnimationDriver {
}
// Shared do-nothing driver instance, usable when animations are disabled.
AnimationDriver.NOOP = new NoopAnimationDriver();
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
// Milliseconds per second; used when converting `s` timing values to ms.
/** @type {?} */
const ONE_SECOND = 1000;
// Delimiters of `{{ param }}` substitution expressions inside style values.
/** @type {?} */
const SUBSTITUTION_EXPR_START = '{{';
/** @type {?} */
const SUBSTITUTION_EXPR_END = '}}';
// CSS classes/selectors the engine applies to elements while they enter or
// leave the DOM, belong to a trigger, or are actively animating.
/** @type {?} */
const ENTER_CLASSNAME = 'ng-enter';
/** @type {?} */
const LEAVE_CLASSNAME = 'ng-leave';
/** @type {?} */
const NG_TRIGGER_CLASSNAME = 'ng-trigger';
/** @type {?} */
const NG_TRIGGER_SELECTOR = '.ng-trigger';
/** @type {?} */
const NG_ANIMATING_CLASSNAME = 'ng-animating';
/** @type {?} */
const NG_ANIMATING_SELECTOR = '.ng-animating';
/**
 * Resolves a timing value to milliseconds. Numbers pass straight through;
 * strings like `'100ms'` / `'2s'` are parsed, anything unparseable yields 0.
 * @param {?} value number of ms, or a `<number>(ms|s)` string
 * @return {?} the value in milliseconds
 */
function resolveTimingValue(value) {
    if (typeof value == 'number') {
        return value;
    }
    /** @type {?} */
    const parsed = value.match(/^(-?[\.\d]+)(m?s)/);
    if (!parsed || parsed.length < 2) {
        return 0;
    }
    return _convertTimeValueToMS(parseFloat(parsed[1]), parsed[2]);
}
/**
 * Converts a numeric timing value into milliseconds.
 * @param {?} value numeric amount
 * @param {?} unit `'s'` scales by ONE_SECOND; any other unit (`'ms'`) is
 *     treated as milliseconds already
 * @return {?} the value in milliseconds
 */
function _convertTimeValueToMS(value, unit) {
    return unit === 's' ? value * ONE_SECOND : value;
}
/**
 * Resolves `timings` into a `{duration, delay, easing}` object. Inputs that
 * already carry a `duration` property are returned untouched; everything else
 * is parsed as a timing expression.
 * @param {?} timings number, timing-expression string, or resolved object
 * @param {?} errors sink for parse/validation error messages
 * @param {?=} allowNegativeValues suppresses negative-value validation
 * @return {?}
 */
function resolveTiming(timings, errors, allowNegativeValues) {
    if (timings.hasOwnProperty('duration')) {
        return timings;
    }
    return parseTimeExpression(timings, errors, allowNegativeValues);
}
/**
 * Parses a timing expression (`'1s'`, `'100ms 200ms'`, `'1s 0.5s ease-out'`,
 * or a bare number of ms) into `{duration, delay, easing}`. Invalid strings
 * and (unless allowed) negative durations/delays are reported into `errors`.
 * @param {?} exp timing expression string or a number of milliseconds
 * @param {?} errors sink for error messages
 * @param {?=} allowNegativeValues when truthy, negative values are accepted
 * @return {?}
 */
function parseTimeExpression(exp, errors, allowNegativeValues) {
    /** @type {?} */
    const regex = /^(-?[\.\d]+)(m?s)(?:\s+(-?[\.\d]+)(m?s))?(?:\s+([-a-z]+(?:\(.+?\))?))?$/i;
    /** @type {?} */
    let duration;
    /** @type {?} */
    let delay = 0;
    /** @type {?} */
    let easing = '';
    if (typeof exp !== 'string') {
        // numeric input is already a duration in ms
        duration = (/** @type {?} */ (exp));
    }
    else {
        /** @type {?} */
        const matches = exp.match(regex);
        if (matches === null) {
            errors.push(`The provided timing value "${exp}" is invalid.`);
            return { duration: 0, delay: 0, easing: '' };
        }
        duration = _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
        if (matches[3] != null) {
            delay = _convertTimeValueToMS(parseFloat(matches[3]), matches[4]);
        }
        if (matches[5]) {
            easing = matches[5];
        }
    }
    if (!allowNegativeValues) {
        /** @type {?} */
        const insertAt = errors.length;
        /** @type {?} */
        let invalid = false;
        if (duration < 0) {
            errors.push(`Duration values below 0 are not allowed for this animation step.`);
            invalid = true;
        }
        if (delay < 0) {
            errors.push(`Delay values below 0 are not allowed for this animation step.`);
            invalid = true;
        }
        if (invalid) {
            // keep the summary message ahead of the specific ones
            errors.splice(insertAt, 0, `The provided timing value "${exp}" is invalid.`);
        }
    }
    return { duration, delay, easing };
}
/**
 * Shallow-copies every own enumerable key of `obj` onto `destination`.
 * @param {?} obj source object
 * @param {?=} destination target object (a fresh `{}` by default); mutated in place
 * @return {?} the destination object
 */
function copyObj(obj, destination = {}) {
    for (const prop of Object.keys(obj)) {
        destination[prop] = obj[prop];
    }
    return destination;
}
/**
 * Flattens one style map — or an array of style maps — into a single merged
 * map (later entries win on key collisions).
 * @param {?} styles a style object or an array of style objects
 * @return {?} a single merged style map
 */
function normalizeStyles(styles) {
    /** @type {?} */
    const merged = {};
    /** @type {?} */
    const list = Array.isArray(styles) ? styles : [styles];
    list.forEach(data => copyStyles(data, false, merged));
    return merged;
}
/**
 * Copies style entries from `styles` onto `destination`.
 * @param {?} styles source style map
 * @param {?} readPrototype when true, a for-in walk is used so that
 *     prototypically inherited entries (the backFill map) are copied too
 * @param {?=} destination target map (fresh `{}` by default); mutated in place
 * @return {?} the destination map
 */
function copyStyles(styles, readPrototype, destination = {}) {
    if (!readPrototype) {
        copyObj(styles, destination);
        return destination;
    }
    // for-in deliberately includes inherited enumerable properties
    for (const prop in styles) {
        destination[prop] = styles[prop];
    }
    return destination;
}
/**
 * Formats one `key:value;` fragment for a style attribute string; empty
 * values produce an empty fragment.
 * @param {?} element unused; kept for signature compatibility
 * @param {?} key CSS property name (dash-case)
 * @param {?} value CSS value
 * @return {?} `'key:value;'` or `''`
 */
function getStyleAttributeString(element, key, value) {
    return value ? `${key}:${value};` : '';
}
/**
 * Manually reflects `element.style` into the element's `style` attribute.
 * Needed on platform-server because Domino doesn't know the full set of CSS
 * properties and fails to reflect some of them automatically.
 * @param {?} element element whose style attribute is rewritten
 * @return {?}
 */
function writeStyleAttribute(element) {
    /** @type {?} */
    const style = element.style;
    /** @type {?} */
    let styleAttrValue = '';
    // properties Domino does track, via the CSSStyleDeclaration API
    for (let i = 0; i < style.length; i++) {
        /** @type {?} */
        const key = style.item(i);
        styleAttrValue += getStyleAttributeString(element, key, style.getPropertyValue(key));
    }
    // properties set directly as camelCase fields on the style object
    for (const key in style) {
        // Skip internal Domino properties that don't need to be reflected.
        if (!style.hasOwnProperty(key) || key.startsWith('_')) {
            continue;
        }
        styleAttrValue += getStyleAttributeString(element, camelCaseToDashCase(key), style[key]);
    }
    element.setAttribute('style', styleAttrValue);
}
/**
 * Applies `styles` (dash-case keys) onto `element.style`, optionally
 * recording each property's prior value into `formerStyles` so it can be
 * restored later. Elements without a `style` object are ignored.
 * @param {?} element target element
 * @param {?} styles map of dash-case CSS property -> value
 * @param {?=} formerStyles when provided, receives the pre-existing values
 * @return {?}
 */
function setStyles(element, styles, formerStyles) {
    if (!element['style']) {
        return;
    }
    Object.keys(styles).forEach(prop => {
        /** @type {?} */
        const camelProp = dashCaseToCamelCase(prop);
        if (formerStyles && !formerStyles.hasOwnProperty(prop)) {
            formerStyles[prop] = element.style[camelProp];
        }
        element.style[camelProp] = styles[prop];
    });
    // On the server set the 'style' attribute since it's not automatically reflected.
    if (isNode()) {
        writeStyleAttribute(element);
    }
}
/**
 * Clears every property named in `styles` from `element.style`. Elements
 * without a `style` object are ignored.
 * @param {?} element target element
 * @param {?} styles map whose dash-case keys name the properties to clear
 * @return {?}
 */
function eraseStyles(element, styles) {
    if (!element['style']) {
        return;
    }
    for (const prop of Object.keys(styles)) {
        element.style[dashCaseToCamelCase(prop)] = '';
    }
    // On the server set the 'style' attribute since it's not automatically reflected.
    if (isNode()) {
        writeStyleAttribute(element);
    }
}
/**
 * Collapses animation metadata into a single node: single-element arrays are
 * unwrapped, longer arrays are wrapped in a sequence(), and non-arrays pass
 * through unchanged.
 * @param {?} steps one metadata node or an array of them
 * @return {?} a single metadata node
 */
function normalizeAnimationEntry(steps) {
    if (!Array.isArray(steps)) {
        return steps;
    }
    return steps.length == 1 ? steps[0] : sequence(steps);
}
/**
 * Verifies that every `{{ param }}` substitution inside `value` is declared
 * in `options.params`, pushing an error per unresolved name.
 * @param {?} value style value possibly containing substitutions
 * @param {?} options animation options carrying a `params` map
 * @param {?} errors sink for error messages
 * @return {?}
 */
function validateStyleParams(value, options, errors) {
    /** @type {?} */
    const params = options.params || {};
    for (const varName of extractStyleParams(value)) {
        if (!params.hasOwnProperty(varName)) {
            errors.push(`Unable to resolve the local animation param ${varName} in the given list of values`);
        }
    }
}
// Global matcher for `{{ name }}` substitution expressions in style values.
/** @type {?} */
const PARAM_REGEX = new RegExp(`${SUBSTITUTION_EXPR_START}\\s*(.+?)\\s*${SUBSTITUTION_EXPR_END}`, 'g');
/**
 * Collects the names of all `{{ param }}` substitutions found in `value`;
 * non-string inputs yield an empty list.
 * @param {?} value style value to scan
 * @return {?} array of substitution names
 */
function extractStyleParams(value) {
    /** @type {?} */
    const params = [];
    if (typeof value === 'string') {
        /** @type {?} */
        let match;
        while ((match = PARAM_REGEX.exec(value)) !== null) {
            params.push((/** @type {?} */ (match[1])));
        }
        // PARAM_REGEX is global/stateful — rewind it for the next caller
        PARAM_REGEX.lastIndex = 0;
    }
    return params;
}
/**
 * Replaces every `{{ param }}` substitution in `value` with the matching
 * entry from `params`. Missing params produce an error and substitute ''.
 * When no substitution changed the string, the original (possibly numeric)
 * value is returned untouched so numbers stay numbers.
 * @param {?} value style value (string or number)
 * @param {?} params map of substitution name -> replacement value
 * @param {?} errors sink for missing-param error messages
 * @return {?}
 */
function interpolateParams(value, params, errors) {
    /** @type {?} */
    const original = value.toString();
    /** @type {?} */
    const replaced = original.replace(PARAM_REGEX, (_, varName) => {
        if (!params.hasOwnProperty(varName)) {
            // this means that the value was never overridden by the data passed in by the user
            errors.push(`Please provide a value for the animation param ${varName}`);
            return '';
        }
        return params[varName].toString();
    });
    // we do this to assert that numeric values stay as they are
    return replaced == original ? value : replaced;
}
/**
 * Drains an iterator (an object exposing `next()`) into an array.
 * @param {?} iterator iterator to exhaust
 * @return {?} array of all yielded values
 */
function iteratorToArray(iterator) {
    /** @type {?} */
    const arr = [];
    for (let item = iterator.next(); !item.done; item = iterator.next()) {
        arr.push(item.value);
    }
    return arr;
}
// Matches one or more dashes followed by the character to capitalize.
/** @type {?} */
const DASH_CASE_REGEXP = /-+([a-z0-9])/g;
/**
 * Converts a dash-case name (`background-color`) to camelCase
 * (`backgroundColor`).
 * @param {?} input dash-case string
 * @return {?} camelCase string
 */
function dashCaseToCamelCase(input) {
    return input.replace(DASH_CASE_REGEXP, (match, char) => char.toUpperCase());
}
/**
 * Converts a camelCase name (`backgroundColor`) to dash-case
 * (`background-color`).
 * @param {?} input camelCase string
 * @return {?} lowercase dash-case string
 */
function camelCaseToDashCase(input) {
    /** @type {?} */
    const dashed = input.replace(/([a-z])([A-Z])/g, '$1-$2');
    return dashed.toLowerCase();
}
/**
 * Styles from previous players may only be merged into an instant step —
 * i.e. when either the duration or the delay is exactly zero.
 * @param {?} duration step duration in ms
 * @param {?} delay step delay in ms
 * @return {?} whether merging previous player styles is allowed
 */
function allowPreviousPlayerStylesMerge(duration, delay) {
    /** @type {?} */
    const noDuration = duration === 0;
    /** @type {?} */
    const noDelay = delay === 0;
    return noDuration || noDelay;
}
/**
 * Seeds the first keyframe with the styles left over from previous players.
 * Properties the first keyframe did not already declare are then backfilled
 * into all remaining keyframes using the element's current computed style so
 * every keyframe stays complete.
 * @param {?} element element used to compute backfill values
 * @param {?} keyframes keyframe style maps; mutated in place
 * @param {?} previousStyles styles carried over from prior players
 * @return {?} the (mutated) keyframes array
 */
function balancePreviousStylesIntoKeyframes(element, keyframes, previousStyles) {
    /** @type {?} */
    const previousStyleProps = Object.keys(previousStyles);
    if (!previousStyleProps.length || !keyframes.length) {
        return keyframes;
    }
    /** @type {?} */
    const startingKeyframe = keyframes[0];
    /** @type {?} */
    const missingStyleProps = [];
    for (const prop of previousStyleProps) {
        if (!startingKeyframe.hasOwnProperty(prop)) {
            missingStyleProps.push(prop);
        }
        startingKeyframe[prop] = previousStyles[prop];
    }
    if (missingStyleProps.length) {
        // backfill the newly introduced properties into every later keyframe
        for (let i = 1; i < keyframes.length; i++) {
            /** @type {?} */
            const kf = keyframes[i];
            for (const prop of missingStyleProps) {
                kf[prop] = computeStyle(element, prop);
            }
        }
    }
    return keyframes;
}
/**
 * Dispatches an animation AST `node` to the visitor method matching its
 * numeric node type; throws for unknown types.
 * @param {?} visitor object implementing the visitXxx methods
 * @param {?} node AST node carrying a `type` discriminant
 * @param {?} context traversal context forwarded to the visitor
 * @return {?} whatever the visitor method returns
 */
function visitDslNode(visitor, node, context) {
    /** @type {?} */
    const type = node.type;
    if (type === 7 /* Trigger */)
        return visitor.visitTrigger(node, context);
    if (type === 0 /* State */)
        return visitor.visitState(node, context);
    if (type === 1 /* Transition */)
        return visitor.visitTransition(node, context);
    if (type === 2 /* Sequence */)
        return visitor.visitSequence(node, context);
    if (type === 3 /* Group */)
        return visitor.visitGroup(node, context);
    if (type === 4 /* Animate */)
        return visitor.visitAnimate(node, context);
    if (type === 5 /* Keyframes */)
        return visitor.visitKeyframes(node, context);
    if (type === 6 /* Style */)
        return visitor.visitStyle(node, context);
    if (type === 8 /* Reference */)
        return visitor.visitReference(node, context);
    if (type === 9 /* AnimateChild */)
        return visitor.visitAnimateChild(node, context);
    if (type === 10 /* AnimateRef */)
        return visitor.visitAnimateRef(node, context);
    if (type === 11 /* Query */)
        return visitor.visitQuery(node, context);
    if (type === 12 /* Stagger */)
        return visitor.visitStagger(node, context);
    throw new Error(`Unable to resolve animation metadata node #${node.type}`);
}
/**
 * Reads the current computed value of `prop` for `element` via the browser's
 * `window.getComputedStyle` (browser-only; `window` must exist).
 * @param {?} element element to inspect
 * @param {?} prop CSS property name
 * @return {?} the computed style value
 */
function computeStyle(element, prop) {
    /** @type {?} */
    const computed = window.getComputedStyle(element);
    return computed[prop];
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
* @type {?}
*/
// `*` — the wildcard state; matches any state name in a transition expression.
const ANY_STATE = '*';
/**
 * Parses a transition expression into an array of matcher entries. String
 * input may contain several comma-separated expressions (each parsed via
 * parseInnerTransitionStr); non-string input (a matcher function) is wrapped
 * in an array as-is.
 * @param {?} transitionValue expression string or matcher function
 * @param {?} errors sink for parse error messages
 * @return {?} array of matcher functions
 */
function parseTransitionExpr(transitionValue, errors) {
    /** @type {?} */
    const expressions = [];
    if (typeof transitionValue != 'string') {
        expressions.push((/** @type {?} */ (transitionValue)));
        return expressions;
    }
    for (const str of transitionValue.split(/\s*,\s*/)) {
        parseInnerTransitionStr(str, expressions, errors);
    }
    return expressions;
}
/**
 * Parses a single transition expression (e.g. `'a => b'`, `'* <=> void'`,
 * or an alias like `':enter'`) and appends the resulting matcher function(s)
 * to `expressions`. Unsupported expressions are reported into `errors`.
 * @param {?} eventStr transition expression string
 * @param {?} expressions output array of matcher functions; mutated in place
 * @param {?} errors sink for error messages
 * @return {?}
 */
function parseInnerTransitionStr(eventStr, expressions, errors) {
    if (eventStr[0] == ':') {
        // alias handling: may resolve to a matcher function directly, or to
        // an equivalent `from => to` expression string
        /** @type {?} */
        const aliasResult = parseAnimationAlias(eventStr, errors);
        if (typeof aliasResult == 'function') {
            expressions.push(aliasResult);
            return;
        }
        eventStr = (/** @type {?} */ (aliasResult));
    }
    /** @type {?} */
    const match = eventStr.match(/^(\*|[-\w]+)\s*(<?[=-]>)\s*(\*|[-\w]+)$/);
    if (match == null || match.length < 4) {
        errors.push(`The provided transition expression "${eventStr}" is not supported`);
        return expressions;
    }
    /** @type {?} */
    const [, fromState, separator, toState] = match;
    expressions.push(makeLambdaFromStates(fromState, toState));
    // `<=>`/`<->` are bidirectional; add the reverse matcher unless the
    // expression is the full wildcard `* <=> *` (which already matches all)
    /** @type {?} */
    const isFullAnyStateExpr = fromState == ANY_STATE && toState == ANY_STATE;
    if (separator[0] == '<' && !isFullAnyStateExpr) {
        expressions.push(makeLambdaFromStates(toState, fromState));
    }
}
/**
 * Resolves a transition alias. `:enter`/`:leave` expand to equivalent
 * expression strings; `:increment`/`:decrement` resolve directly to numeric
 * comparison matcher functions. Unknown aliases are reported into `errors`
 * and fall back to the match-all expression.
 * @param {?} alias alias string beginning with `:`
 * @param {?} errors sink for error messages
 * @return {?} an expression string or a matcher function
 */
function parseAnimationAlias(alias, errors) {
    if (alias == ':enter') {
        return 'void => *';
    }
    if (alias == ':leave') {
        return '* => void';
    }
    if (alias == ':increment') {
        return (fromState, toState) => parseFloat(toState) > parseFloat(fromState);
    }
    if (alias == ':decrement') {
        return (fromState, toState) => parseFloat(toState) < parseFloat(fromState);
    }
    errors.push(`The transition alias value "${alias}" is not supported`);
    return '* => *';
}
// DO NOT REFACTOR ... keep the follow set instantiations
// with the values intact (closure compiler for some reason
// removes follow-up lines that add the values outside of
// the constructor...
// String tokens treated as boolean `true`/`false` when matching transition
// states (see makeLambdaFromStates below).
/** @type {?} */
const TRUE_BOOLEAN_VALUES = new Set(['true', '1']);
/** @type {?} */
const FALSE_BOOLEAN_VALUES = new Set(['false', '0']);
/**
 * Builds a `(fromState, toState) => boolean` matcher for a `lhs => rhs`
 * transition. Each side matches the wildcard `*`, a loosely-equal state
 * value, or — when the side is a boolean token like `'true'`/`'0'` — the
 * corresponding actual boolean state.
 * @param {?} lhs from-state token
 * @param {?} rhs to-state token
 * @return {?} the matcher function
 */
function makeLambdaFromStates(lhs, rhs) {
    /** @type {?} */
    const sideMatches = (token, state, tokenIsBooleanLike) => {
        if (token == ANY_STATE || token == state) {
            return true;
        }
        // boolean states only match their textual equivalents ('true'/'1', ...)
        if (tokenIsBooleanLike && typeof state === 'boolean') {
            return state ? TRUE_BOOLEAN_VALUES.has(token) : FALSE_BOOLEAN_VALUES.has(token);
        }
        return false;
    };
    /** @type {?} */
    const LHS_MATCH_BOOLEAN = TRUE_BOOLEAN_VALUES.has(lhs) || FALSE_BOOLEAN_VALUES.has(lhs);
    /** @type {?} */
    const RHS_MATCH_BOOLEAN = TRUE_BOOLEAN_VALUES.has(rhs) || FALSE_BOOLEAN_VALUES.has(rhs);
    return (fromState, toState) => sideMatches(lhs, fromState, LHS_MATCH_BOOLEAN) &&
        sideMatches(rhs, toState, RHS_MATCH_BOOLEAN);
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/** @type {?} */
const SELF_TOKEN = ':self';
/**
 * Strips `:self` tokens (plus surrounding whitespace and a trailing comma)
 * out of a query selector string.
 *
 * Note: the previous pattern used `\s` inside a template literal, which
 * JavaScript collapses to a plain `s` — the compiled regex was
 * `/s*:selfs*,?/g`, which never matched surrounding whitespace and instead
 * consumed literal `s` characters adjacent to the token. `\\s` yields the
 * intended `\s` whitespace class.
 * @type {?}
 */
const SELF_TOKEN_REGEX = new RegExp(`\\s*${SELF_TOKEN}\\s*,?`, 'g');
/*
* [Validation]
* The visitor code below will traverse the animation AST generated by the animation verb functions
* (the output is a tree of objects) and attempt to perform a series of validations on the data. The
* following corner-cases will be validated:
*
* 1. Overlap of animations
* Given that a CSS property cannot be animated in more than one place at the same time, it's
* important that this behavior is detected and validated. The way in which this occurs is that
* each time a style property is examined, a string-map containing the property will be updated with
* the start and end times for when the property is used within an animation step.
*
* If there are two or more parallel animations that are currently running (these are invoked by the
* group()) on the same element then the validator will throw an error. Since the start/end timing
* values are collected for each property then if the current animation step is animating the same
* property and its timing values fall anywhere into the window of time that the property is
* currently being animated within then this is what causes an error.
*
* 2. Timing values
* The validator will validate to see if a timing value of `duration delay easing` or
* `durationNumber` is valid or not.
*
 * (Note that upon validation the code below will replace the timing data with an object containing
 * {duration,delay,easing}.)
*
* 3. Offset Validation
* Each of the style() calls are allowed to have an offset value when placed inside of keyframes().
* Offsets within keyframes() are considered valid when:
*
* - No offsets are used at all
* - Each style() entry contains an offset value
* - Each offset is between 0 and 1
 * - Each offset is greater than or equal to the previous one
*
* Otherwise an error will be thrown.
*/
/**
 * Validates `metadata` and builds the corresponding animation AST, reporting
 * problems into `errors`.
 * @param {?} driver animation driver used for style-property validation
 * @param {?} metadata animation DSL metadata
 * @param {?} errors sink for validation error messages
 * @return {?} the built AST
 */
function buildAnimationAst(driver, metadata, errors) {
    /** @type {?} */
    const builder = new AnimationAstBuilderVisitor(driver);
    return builder.build(metadata, errors);
}
// Key under which styles collected outside of any query() are stored.
/** @type {?} */
const ROOT_SELECTOR = '';
class AnimationAstBuilderVisitor {
    /**
     * @param {?} _driver animation driver used to validate style properties
     */
    constructor(_driver) {
        this._driver = _driver;
    }
/**
* @param {?} metadata
* @param {?} errors
* @return {?}
*/
build(metadata, errors) {
/** @type {?} */
const context = new AnimationAstBuilderContext(errors);
this._resetContextStyleTimingState(context);
return (/** @type {?} */ (visitDslNode(this, normalizeAnimationEntry(metadata), context)));
}
/**
* @private
* @param {?} context
* @return {?}
*/
_resetContextStyleTimingState(context) {
context.currentQuerySelector = ROOT_SELECTOR;
context.collectedStyles = {};
context.collectedStyles[ROOT_SELECTOR] = {};
context.currentTime = 0;
}
    /**
     * Visits a trigger() definition: validates the trigger name, visits each
     * state()/transition() child and aggregates per-trigger query/dep counts.
     * @param {?} metadata
     * @param {?} context
     * @return {?} the Trigger AST node
     */
    visitTrigger(metadata, context) {
        /** @type {?} */
        let queryCount = context.queryCount = 0;
        /** @type {?} */
        let depCount = context.depCount = 0;
        /** @type {?} */
        const states = [];
        /** @type {?} */
        const transitions = [];
        if (metadata.name.charAt(0) == '@') {
            context.errors.push('animation triggers cannot be prefixed with an `@` sign (e.g. trigger(\'@foo\', [...]))');
        }
        metadata.definitions.forEach(def => {
            this._resetContextStyleTimingState(context);
            if (def.type == 0 /* State */) {
                /** @type {?} */
                const stateDef = (/** @type {?} */ (def));
                /** @type {?} */
                const name = stateDef.name;
                // state('a, b', ...) declares several states at once: visit the
                // shared definition once per name...
                name.toString().split(/\s*,\s*/).forEach(n => {
                    stateDef.name = n;
                    states.push(this.visitState(stateDef, context));
                });
                // ...then restore the original (possibly comma-separated) name
                // since the metadata object is shared with the caller.
                stateDef.name = name;
            }
            else if (def.type == 1 /* Transition */) {
                /** @type {?} */
                const transition = this.visitTransition((/** @type {?} */ (def)), context);
                queryCount += transition.queryCount;
                depCount += transition.depCount;
                transitions.push(transition);
            }
            else {
                context.errors.push('only state() and transition() definitions can sit inside of a trigger()');
            }
        });
        return {
            type: 7 /* Trigger */,
            name: metadata.name, states, transitions, queryCount, depCount,
            options: null
        };
    }
    /**
     * Visits a state() definition and, when its styles contain `{{ param }}`
     * substitutions, verifies that each substitution has a default value in
     * the state's params — reporting the missing ones as a single error.
     * @param {?} metadata
     * @param {?} context
     * @return {?} the State AST node
     */
    visitState(metadata, context) {
        /** @type {?} */
        const styleAst = this.visitStyle(metadata.styles, context);
        /** @type {?} */
        const astParams = (metadata.options && metadata.options.params) || null;
        if (styleAst.containsDynamicStyles) {
            /** @type {?} */
            const missingSubs = new Set();
            /** @type {?} */
            const params = astParams || {};
            styleAst.styles.forEach(value => {
                if (isObject(value)) {
                    /** @type {?} */
                    const stylesObj = (/** @type {?} */ (value));
                    // collect every substitution name lacking a default param
                    Object.keys(stylesObj).forEach(prop => {
                        extractStyleParams(stylesObj[prop]).forEach(sub => {
                            if (!params.hasOwnProperty(sub)) {
                                missingSubs.add(sub);
                            }
                        });
                    });
                }
            });
            if (missingSubs.size) {
                /** @type {?} */
                const missingSubsArr = iteratorToArray(missingSubs.values());
                context.errors.push(`state("${metadata.name}", ...) must define default values for all the following style substitutions: ${missingSubsArr.join(', ')}`);
            }
        }
        return {
            type: 0 /* State */,
            name: metadata.name,
            style: styleAst,
            options: astParams ? { params: astParams } : null
        };
    }
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitTransition(metadata, context) {
context.queryCount = 0;
context.depCount = 0;
/** @type {?} */
const animation = visitDslNode(this, normalizeAnimationEntry(metadata.animation), context);
/** @type {?} */
const matchers = parseTransitionExpr(metadata.expr, context.errors);
return {
type: 1 /* Transition */,
matchers,
animation,
queryCount: context.queryCount,
depCount: context.depCount,
options: normalizeAnimationOptions(metadata.options)
};
}
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitSequence(metadata, context) {
return {
type: 2 /* Sequence */,
steps: metadata.steps.map(s => visitDslNode(this, s, context)),
options: normalizeAnimationOptions(metadata.options)
};
}
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitGroup(metadata, context) {
/** @type {?} */
const currentTime = context.currentTime;
/** @type {?} */
let furthestTime = 0;
/** @type {?} */
const steps = metadata.steps.map(step => {
context.currentTime = currentTime;
/** @type {?} */
const innerAst = visitDslNode(this, step, context);
furthestTime = Math.max(furthestTime, context.currentTime);
return innerAst;
});
context.currentTime = furthestTime;
return {
type: 3 /* Group */,
steps,
options: normalizeAnimationOptions(metadata.options)
};
}
    /**
     * Visits an animate() step: resolves its timings and visits its styles,
     * which may either be a keyframes() block or a plain style() step (an
     * empty style() is synthesized when none was supplied).
     * @param {?} metadata
     * @param {?} context
     * @return {?} the Animate AST node
     */
    visitAnimate(metadata, context) {
        /** @type {?} */
        const timingAst = constructTimingAst(metadata.timings, context.errors);
        context.currentAnimateTimings = timingAst;
        /** @type {?} */
        let styleAst;
        /** @type {?} */
        let styleMetadata = metadata.styles ? metadata.styles : style({});
        if (styleMetadata.type == 5 /* Keyframes */) {
            styleAst = this.visitKeyframes((/** @type {?} */ (styleMetadata)), context);
        }
        else {
            // NOTE(review): this inner declaration deliberately shadows the
            // outer `styleMetadata` — here the raw (possibly undefined)
            // metadata.styles is needed to detect an empty step.
            /** @type {?} */
            let styleMetadata = (/** @type {?} */ (metadata.styles));
            /** @type {?} */
            let isEmpty = false;
            if (!styleMetadata) {
                isEmpty = true;
                /** @type {?} */
                const newStyleData = {};
                if (timingAst.easing) {
                    newStyleData['easing'] = timingAst.easing;
                }
                styleMetadata = style(newStyleData);
            }
            // an animate() step advances the timeline by duration + delay
            context.currentTime += timingAst.duration + timingAst.delay;
            /** @type {?} */
            const _styleAst = this.visitStyle(styleMetadata, context);
            _styleAst.isEmptyStep = isEmpty;
            styleAst = _styleAst;
        }
        context.currentAnimateTimings = null;
        return {
            type: 4 /* Animate */,
            timings: timingAst,
            style: styleAst,
            options: null
        };
    }
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitStyle(metadata, context) {
/** @type {?} */
const ast = this._makeStyleAst(metadata, context);
this._validateStyleAst(ast, context);
return ast;
}
    /**
     * Converts style() metadata into a StyleAst: normalizes the input into a
     * list of style maps / AUTO_STYLE strings, hoists a step-level `easing`
     * value out of the maps and detects `{{ param }}` substitutions.
     * @private
     * @param {?} metadata
     * @param {?} context
     * @return {?} the Style AST node
     */
    _makeStyleAst(metadata, context) {
        /** @type {?} */
        const styles = [];
        if (Array.isArray(metadata.styles)) {
            ((/** @type {?} */ (metadata.styles))).forEach(styleTuple => {
                if (typeof styleTuple == 'string') {
                    // the only legal string entry is the `*` auto-style token
                    if (styleTuple == AUTO_STYLE) {
                        styles.push((/** @type {?} */ (styleTuple)));
                    }
                    else {
                        context.errors.push(`The provided style string value ${styleTuple} is not allowed.`);
                    }
                }
                else {
                    styles.push((/** @type {?} */ (styleTuple)));
                }
            });
        }
        else {
            styles.push(metadata.styles);
        }
        /** @type {?} */
        let containsDynamicStyles = false;
        /** @type {?} */
        let collectedEasing = null;
        styles.forEach(styleData => {
            if (isObject(styleData)) {
                /** @type {?} */
                const styleMap = (/** @type {?} */ (styleData));
                /** @type {?} */
                const easing = styleMap['easing'];
                if (easing) {
                    // `easing` belongs on the AST node, not among the CSS
                    // properties (note: the input map is mutated here)
                    collectedEasing = (/** @type {?} */ (easing));
                    delete styleMap['easing'];
                }
                if (!containsDynamicStyles) {
                    for (let prop in styleMap) {
                        /** @type {?} */
                        const value = styleMap[prop];
                        // a `{{` anywhere in a value marks the styles as dynamic
                        if (value.toString().indexOf(SUBSTITUTION_EXPR_START) >= 0) {
                            containsDynamicStyles = true;
                            break;
                        }
                    }
                }
            }
        });
        return {
            type: 6 /* Style */,
            styles,
            easing: collectedEasing,
            offset: metadata.offset, containsDynamicStyles,
            options: null
        };
    }
    /**
     * Validates a StyleAst's properties against the driver and checks for the
     * same CSS property being animated within overlapping time windows of
     * parallel animations (see the "Overlap of animations" note in the
     * file-level comment above).
     * @private
     * @param {?} ast
     * @param {?} context
     * @return {?}
     */
    _validateStyleAst(ast, context) {
        /** @type {?} */
        const timings = context.currentAnimateTimings;
        /** @type {?} */
        let endTime = context.currentTime;
        /** @type {?} */
        let startTime = context.currentTime;
        // the window covered by this step is
        // [currentTime - duration - delay, currentTime]
        if (timings && startTime > 0) {
            startTime -= timings.duration + timings.delay;
        }
        ast.styles.forEach(tuple => {
            // string entries (AUTO_STYLE) carry no properties to validate
            if (typeof tuple == 'string')
                return;
            Object.keys(tuple).forEach(prop => {
                if (!this._driver.validateStyleProperty(prop)) {
                    context.errors.push(`The provided animation property "${prop}" is not a supported CSS property for animations`);
                    return;
                }
                // per-query-selector record of when each property animates
                /** @type {?} */
                const collectedStyles = context.collectedStyles[(/** @type {?} */ (context.currentQuerySelector))];
                /** @type {?} */
                const collectedEntry = collectedStyles[prop];
                /** @type {?} */
                let updateCollectedStyle = true;
                if (collectedEntry) {
                    // overlapping window on the same property means two parallel
                    // animations fight over it — report and keep the old record
                    if (startTime != endTime && startTime >= collectedEntry.startTime &&
                        endTime <= collectedEntry.endTime) {
                        context.errors.push(`The CSS property "${prop}" that exists between the times of "${collectedEntry.startTime}ms" and "${collectedEntry.endTime}ms" is also being animated in a parallel animation between the times of "${startTime}ms" and "${endTime}ms"`);
                        updateCollectedStyle = false;
                    }
                    // we always choose the smaller start time value since we
                    // want to have a record of the entire animation window where
                    // the style property is being animated in between
                    startTime = collectedEntry.startTime;
                }
                if (updateCollectedStyle) {
                    collectedStyles[prop] = { startTime, endTime };
                }
                // `{{ param }}` substitutions must resolve against the options
                if (context.options) {
                    validateStyleParams(tuple[prop], context.options, context.errors);
                }
            });
        });
    }
    /**
     * Visits a keyframes() block: validates keyframe offsets (range, ordering
     * and all-or-none usage), generates evenly spaced offsets when none were
     * supplied, and validates each keyframe's styles at the moment in the
     * timeline where it becomes active.
     * @param {?} metadata
     * @param {?} context
     * @return {?} the Keyframes AST node
     */
    visitKeyframes(metadata, context) {
        /** @type {?} */
        const ast = { type: 5 /* Keyframes */, styles: [], options: null };
        if (!context.currentAnimateTimings) {
            context.errors.push(`keyframes() must be placed inside of a call to animate()`);
            return ast;
        }
        /** @type {?} */
        const MAX_KEYFRAME_OFFSET = 1;
        /** @type {?} */
        let totalKeyframesWithOffsets = 0;
        /** @type {?} */
        const offsets = [];
        /** @type {?} */
        let offsetsOutOfOrder = false;
        /** @type {?} */
        let keyframesOutOfRange = false;
        /** @type {?} */
        let previousOffset = 0;
        // first pass: build a StyleAst per keyframe and collect its offset
        /** @type {?} */
        const keyframes = metadata.steps.map(styles => {
            /** @type {?} */
            const style$$1 = this._makeStyleAst(styles, context);
            // an explicit offset may come from style({offset}) or from the
            // styles list itself
            /** @type {?} */
            let offsetVal = style$$1.offset != null ? style$$1.offset : consumeOffset(style$$1.styles);
            /** @type {?} */
            let offset = 0;
            if (offsetVal != null) {
                totalKeyframesWithOffsets++;
                offset = style$$1.offset = offsetVal;
            }
            keyframesOutOfRange = keyframesOutOfRange || offset < 0 || offset > 1;
            offsetsOutOfOrder = offsetsOutOfOrder || offset < previousOffset;
            previousOffset = offset;
            offsets.push(offset);
            return style$$1;
        });
        if (keyframesOutOfRange) {
            context.errors.push(`Please ensure that all keyframe offsets are between 0 and 1`);
        }
        if (offsetsOutOfOrder) {
            context.errors.push(`Please ensure that all keyframe offsets are in order`);
        }
        /** @type {?} */
        const length = metadata.steps.length;
        /** @type {?} */
        let generatedOffset = 0;
        if (totalKeyframesWithOffsets > 0 && totalKeyframesWithOffsets < length) {
            context.errors.push(`Not all style() steps within the declared keyframes() contain offsets`);
        }
        else if (totalKeyframesWithOffsets == 0) {
            // no offsets at all: distribute the keyframes evenly across [0, 1]
            generatedOffset = MAX_KEYFRAME_OFFSET / (length - 1);
        }
        /** @type {?} */
        const limit = length - 1;
        /** @type {?} */
        const currentTime = context.currentTime;
        /** @type {?} */
        const currentAnimateTimings = (/** @type {?} */ (context.currentAnimateTimings));
        /** @type {?} */
        const animateDuration = currentAnimateTimings.duration;
        // second pass: validate each keyframe at the time it becomes active
        keyframes.forEach((kf, i) => {
            /** @type {?} */
            const offset = generatedOffset > 0 ? (i == limit ? 1 : (generatedOffset * i)) : offsets[i];
            /** @type {?} */
            const durationUpToThisFrame = offset * animateDuration;
            context.currentTime = currentTime + currentAnimateTimings.delay + durationUpToThisFrame;
            currentAnimateTimings.duration = durationUpToThisFrame;
            this._validateStyleAst(kf, context);
            kf.offset = offset;
            ast.styles.push(kf);
        });
        return ast;
    }
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitReference(metadata, context) {
return {
type: 8 /* Reference */,
animation: visitDslNode(this, normalizeAnimationEntry(metadata.animation), context),
options: normalizeAnimationOptions(metadata.options)
};
}
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitAnimateChild(metadata, context) {
context.depCount++;
return {
type: 9 /* AnimateChild */,
options: normalizeAnimationOptions(metadata.options)
};
}
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitAnimateRef(metadata, context) {
return {
type: 10 /* AnimateRef */,
animation: this.visitReference(metadata.animation, context),
options: normalizeAnimationOptions(metadata.options)
};
}
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitQuery(metadata, context) {
/** @type {?} */
const parentSelector = (/** @type {?} */ (context.currentQuerySelector));
/** @type {?} */
const options = (/** @type {?} */ ((metadata.options || {})));
context.queryCount++;
context.currentQuery = metadata;
const [selector, includeSelf] = normalizeSelector(metadata.selector);
context.currentQuerySelector =
parentSelector.length ? (parentSelector + ' ' + selector) : selector;
getOrSetAsInMap(context.collectedStyles, context.currentQuerySelector, {});
/** @type {?} */
const animation = visitDslNode(this, normalizeAnimationEntry(metadata.animation), context);
context.currentQuery = null;
context.currentQuerySelector = parentSelector;
return {
type: 11 /* Query */,
selector,
limit: options.limit || 0,
optional: !!options.optional, includeSelf, animation,
originalSelector: metadata.selector,
options: normalizeAnimationOptions(metadata.options)
};
}
/**
* @param {?} metadata
* @param {?} context
* @return {?}
*/
visitStagger(metadata, context) {
if (!context.currentQuery) {
context.errors.push(`stagger() can only be used inside of query()`);
}
/** @type {?} */
const timings = metadata.timings === 'full' ?
{ duration: 0, delay: 0, easing: 'full' } :
resolveTiming(metadata.timings, context.errors, true);
return {
type: 12 /* Stagger */,
animation: visitDslNode(this, normalizeAnimationEntry(metadata.animation), context), timings,
options: null
};
}
}
/**
 * Splits a query selector into the selector that should actually run and a
 * flag for whether the queried element itself (the self token) must be
 * included in the results.
 * @param {?} selector the raw selector string passed to query()
 * @return {?} a `[selector, hasSelf]` tuple
 */
function normalizeSelector(selector) {
    // the self token may appear anywhere in a comma-separated selector list;
    // `.some(...)` yields the boolean directly (the previous
    // `find(...) ? true : false` relied on the found token being truthy)
    /** @type {?} */
    const hasAmpersand = selector.split(/\s*,\s*/).some(token => token == SELF_TOKEN);
    if (hasAmpersand) {
        selector = selector.replace(SELF_TOKEN_REGEX, '');
    }
    // the :enter and :leave selectors are filled in at runtime during timeline building
    selector = selector.replace(/@\*/g, NG_TRIGGER_SELECTOR)
        // `.slice(1)` drops the leading '@' (substr is deprecated)
        .replace(/@\w+/g, match => NG_TRIGGER_SELECTOR + '-' + match.slice(1))
        .replace(/:animating/g, NG_ANIMATING_SELECTOR);
    return [selector, hasAmpersand];
}
/**
 * Returns a shallow copy of the provided params object, or null when no
 * params were supplied.
 * @param {?} obj params object (may be null/undefined)
 * @return {?} a copied params object, or null
 */
function normalizeParams(obj) {
    if (!obj) {
        return null;
    }
    return copyObj(obj);
}
/**
 * Mutable traversal state threaded through the AST-builder visitor while an
 * animation metadata tree is converted into an AST.
 */
class AnimationAstBuilderContext {
    /**
     * @param {?} errors array that collects validation errors encountered
     *     while the animation AST is being built
     */
    constructor(errors) {
        Object.assign(this, {
            errors,
            // counters for query()/animateChild() usage within the animation
            queryCount: 0,
            depCount: 0,
            // pointers to whichever nodes are currently being visited
            currentTransition: null,
            currentQuery: null,
            currentQuerySelector: null,
            currentAnimateTimings: null,
            // running time (ms) used to detect overlapping style collisions
            currentTime: 0,
            collectedStyles: {},
            options: null,
        });
    }
}
/**
 * Pulls the `offset` value out of a style() entry, mutating the entry by
 * deleting the key, and returns the parsed value. When the styles are an
 * array the last entry carrying an offset wins.
 * @param {?} styles a style value: a string token, a style map, or an array of maps
 * @return {?} the parsed offset, or null when none is present
 */
function consumeOffset(styles) {
    if (typeof styles == 'string')
        return null;
    /** @type {?} */
    let offset = null;
    // shared extraction routine: read, parse and remove the `offset` key
    /** @type {?} */
    const extract = entry => {
        if (isObject(entry) && entry.hasOwnProperty('offset')) {
            const obj = (/** @type {?} */ (entry));
            offset = parseFloat((/** @type {?} */ (obj['offset'])));
            delete obj['offset'];
        }
    };
    if (Array.isArray(styles)) {
        for (const styleTuple of styles) {
            extract(styleTuple);
        }
    }
    else {
        extract(styles);
    }
    return offset;
}
/**
 * Whether the given value is a plain (non-array, non-null) object.
 * Explicitly guards against `typeof null == 'object'`, which the naive
 * typeof check would accept and which would make callers crash on
 * `null.hasOwnProperty('offset')`.
 * @param {?} value the value to test
 * @return {?} true when `value` is a non-null, non-array object
 */
function isObject(value) {
    return value !== null && !Array.isArray(value) && typeof value == 'object';
}
/**
 * Converts a timing value (a number, a timing string like "1s ease", or a
 * pre-resolved AnimateTimings object) into a timing AST.
 * @param {?} value the raw timings value from the animate() call
 * @param {?} errors collector for timing-parse errors
 * @return {?} a timing AST (marked `dynamic` for `{{param}}` strings)
 */
function constructTimingAst(value, errors) {
    if (value.hasOwnProperty('duration')) {
        // already a resolved AnimateTimings object — return it directly.
        // (Previously this fell through to `strValue.split(...)` below,
        // which throws a TypeError on non-string values.)
        const { duration, delay, easing } = (/** @type {?} */ (value));
        return { duration, delay, easing };
    }
    if (typeof value == 'number') {
        /** @type {?} */
        const duration = resolveTiming((/** @type {?} */ (value)), errors).duration;
        return makeTimingAst((/** @type {?} */ (duration)), 0, '');
    }
    /** @type {?} */
    const strValue = (/** @type {?} */ (value));
    // a `{{...}}` interpolation anywhere in the string defers resolution to runtime
    /** @type {?} */
    const isDynamic = strValue.split(/\s+/).some(v => v.charAt(0) == '{' && v.charAt(1) == '{');
    if (isDynamic) {
        /** @type {?} */
        const ast = (/** @type {?} */ (makeTimingAst(0, 0, '')));
        ast.dynamic = true;
        ast.strValue = strValue;
        return (/** @type {?} */ (ast));
    }
    /** @type {?} */
    const timings = resolveTiming(strValue, errors);
    return makeTimingAst(timings.duration, timings.delay, timings.easing);
}
/**
 * Produces a defensive copy of the given animation options, with the nested
 * `params` map copied as well; missing options become an empty object.
 * @param {?} options the animation options (may be null/undefined)
 * @return {?} a normalized copy of the options
 */
function normalizeAnimationOptions(options) {
    if (!options) {
        return {};
    }
    /** @type {?} */
    const copied = copyObj(options);
    if (copied['params']) {
        copied['params'] = (/** @type {?} */ (normalizeParams(copied['params'])));
    }
    return copied;
}
/**
 * Creates a static (non-dynamic) timing AST node.
 * @param {?} duration animation duration in milliseconds
 * @param {?} delay animation delay in milliseconds
 * @param {?} easing easing function name, or ''/null when none applies
 * @return {?} the timing AST node
 */
function makeTimingAst(duration, delay, easing) {
    const timings = { duration, delay, easing };
    return timings;
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * Creates a timeline-animation instruction for the given element.
 * @param {?} element the element being animated
 * @param {?} keyframes web-animations-compatible keyframes
 * @param {?} preStyleProps style props that must be captured before the animation
 * @param {?} postStyleProps style props that must be captured after the animation
 * @param {?} duration animation duration in milliseconds
 * @param {?} delay animation delay in milliseconds
 * @param {?=} easing easing function name (defaults to null)
 * @param {?=} subTimeline whether this instruction belongs to a sub-timeline
 * @return {?} the timeline instruction
 */
function createTimelineInstruction(element, keyframes, preStyleProps, postStyleProps, duration, delay, easing = null, subTimeline = false) {
    /** @type {?} */
    const totalTime = duration + delay;
    return {
        type: 1 /* TimelineAnimation */,
        element,
        keyframes,
        preStyleProps,
        postStyleProps,
        duration,
        delay,
        totalTime,
        easing,
        subTimeline
    };
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * A map from elements to the timeline instructions queued for them.
 * Instructions may be appended repeatedly per element and are removed
 * (consumed) in one shot.
 */
class ElementInstructionMap {
    constructor() {
        this._map = new Map();
    }
    /**
     * Removes and returns all instructions recorded for the element.
     * @param {?} element
     * @return {?} the element's instructions, or an empty array when none exist
     */
    consume(element) {
        /** @type {?} */
        const instructions = this._map.get(element);
        if (!instructions) {
            return [];
        }
        this._map.delete(element);
        return instructions;
    }
    /**
     * Appends instructions to the element's queue, creating it when absent.
     * @param {?} element
     * @param {?} instructions
     * @return {?}
     */
    append(element, instructions) {
        /** @type {?} */
        let existing = this._map.get(element);
        if (!existing) {
            existing = [];
            this._map.set(element, existing);
        }
        existing.push(...instructions);
    }
    /**
     * @param {?} element
     * @return {?} whether any instructions are queued for the element
     */
    has(element) {
        return this._map.has(element);
    }
    /**
     * Drops all queued instructions for all elements.
     * @return {?}
     */
    clear() {
        this._map.clear();
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
// smallest time increment used when a style() step needs its own keyframe
/** @type {?} */
const ONE_FRAME_IN_MILLISECONDS = 1;
// placeholder tokens in query selectors; they are swapped for the generated
// enter/leave CSS class selectors when a query is executed
/** @type {?} */
const ENTER_TOKEN = ':enter';
/** @type {?} */
const ENTER_TOKEN_REGEX = new RegExp(ENTER_TOKEN, 'g');
/** @type {?} */
const LEAVE_TOKEN = ':leave';
/** @type {?} */
const LEAVE_TOKEN_REGEX = new RegExp(LEAVE_TOKEN, 'g');
/*
* The code within this file aims to generate web-animations-compatible keyframes from Angular's
* animation DSL code.
*
* The code below will be converted from:
*
* ```
* sequence([
* style({ opacity: 0 }),
* animate(1000, style({ opacity: 0 }))
* ])
* ```
*
* To:
* ```
* keyframes = [{ opacity: 0, offset: 0 }, { opacity: 1, offset: 1 }]
* duration = 1000
* delay = 0
* easing = ''
* ```
*
* For this operation to cover the combination of animation verbs (style, animate, group, etc...) a
* combination of prototypical inheritance, AST traversal and merge-sort-like algorithms are used.
*
* [AST Traversal]
 * Each of the animation verbs, when executed, will return a string-map object representing what
* type of action it is (style, animate, group, etc...) and the data associated with it. This means
 * that when a functional composition of these functions is evaluated (like in the example above)
* then it will end up producing a tree of objects representing the animation itself.
*
* When this animation object tree is processed by the visitor code below it will visit each of the
* verb statements within the visitor. And during each visit it will build the context of the
* animation keyframes by interacting with the `TimelineBuilder`.
*
* [TimelineBuilder]
* This class is responsible for tracking the styles and building a series of keyframe objects for a
* timeline between a start and end time. The builder starts off with an initial timeline and each
 * time the AST comes across a `group()`, `keyframes()` or a combination of the two within a
* `sequence()` then it will generate a sub timeline for each step as well as a new one after
* they are complete.
*
* As the AST is traversed, the timing state on each of the timelines will be incremented. If a sub
* timeline was created (based on one of the cases above) then the parent timeline will attempt to
* merge the styles used within the sub timelines into itself (only with group() this will happen).
* This happens with a merge operation (much like how the merge works in mergesort) and it will only
* copy the most recently used styles from the sub timelines into the parent timeline. This ensures
* that if the styles are used later on in another phase of the animation then they will be the most
* up-to-date values.
*
* [How Missing Styles Are Updated]
* Each timeline has a `backFill` property which is responsible for filling in new styles into
* already processed keyframes if a new style shows up later within the animation sequence.
*
* ```
* sequence([
* style({ width: 0 }),
* animate(1000, style({ width: 100 })),
* animate(1000, style({ width: 200 })),
* animate(1000, style({ width: 300 }))
* animate(1000, style({ width: 400, height: 400 })) // notice how `height` doesn't exist anywhere
* else
* ])
* ```
*
* What is happening here is that the `height` value is added later in the sequence, but is missing
* from all previous animation steps. Therefore when a keyframe is created it would also be missing
* from all previous keyframes up until where it is first used. For the timeline keyframe generation
* to properly fill in the style it will place the previous value (the value from the parent
* timeline) or a default value of `*` into the backFill object. Given that each of the keyframe
 * styles are objects that prototypically inherit from the backFill object, this means that if a
* value is added into the backFill then it will automatically propagate any missing values to all
* keyframes. Therefore the missing `height` value will be properly filled into the already
* processed keyframes.
*
* When a sub-timeline is created it will have its own backFill property. This is done so that
* styles present within the sub-timeline do not accidentally seep into the previous/future timeline
* keyframes
*
* (For prototypically-inherited contents to be detected a `for(i in obj)` loop must be used.)
*
* [Validation]
 * The code in this file is not responsible for validation. That functionality happens within
* the `AnimationValidatorVisitor` code.
*/
/**
 * Builds web-animations-compatible keyframe timelines for the given
 * animation AST, rooted at `rootElement`.
 * @param {?} driver the animation driver used to query elements and compute styles
 * @param {?} rootElement the element the animation is rooted at
 * @param {?} ast the animation AST to build timelines from
 * @param {?} enterClassName CSS class marking entering elements
 * @param {?} leaveClassName CSS class marking leaving elements
 * @param {?=} startingStyles styles applied before the animation starts
 * @param {?=} finalStyles styles applied once the animation finishes
 * @param {?=} options animation options (params, delay, ...)
 * @param {?=} subInstructions previously-built sub-animation instructions
 * @param {?=} errors collector for build errors
 * @return {?} the list of timeline instructions
 */
function buildAnimationTimelines(driver, rootElement, ast, enterClassName, leaveClassName, startingStyles = {}, finalStyles = {}, options, subInstructions, errors = []) {
    /** @type {?} */
    const visitor = new AnimationTimelineBuilderVisitor();
    return visitor.buildKeyframes(driver, rootElement, ast, enterClassName, leaveClassName, startingStyles, finalStyles, options, subInstructions, errors);
}
/**
 * Walks a built animation AST and produces timeline instructions (keyframes
 * plus duration/delay/easing) for every element taking part in the
 * animation. Traversal state lives in an AnimationTimelineContext; sub
 * contexts/timelines are forked for group(), query() and keyframes() nodes.
 */
class AnimationTimelineBuilderVisitor {
    /**
     * Entry point: visits the AST and returns one timeline instruction per
     * timeline that actually animates something (or a single empty
     * instruction when nothing animates).
     * @param {?} driver
     * @param {?} rootElement
     * @param {?} ast
     * @param {?} enterClassName
     * @param {?} leaveClassName
     * @param {?} startingStyles
     * @param {?} finalStyles
     * @param {?} options
     * @param {?=} subInstructions
     * @param {?=} errors
     * @return {?}
     */
buildKeyframes(driver, rootElement, ast, enterClassName, leaveClassName, startingStyles, finalStyles, options, subInstructions, errors = []) {
subInstructions = subInstructions || new ElementInstructionMap();
/** @type {?} */
const context = new AnimationTimelineContext(driver, rootElement, subInstructions, enterClassName, leaveClassName, errors, []);
context.options = options;
context.currentTimeline.setStyles([startingStyles], null, context.errors, options);
visitDslNode(this, ast, context);
// this checks to see if an actual animation happened
/** @type {?} */
const timelines = context.timelines.filter(timeline => timeline.containsAnimation());
if (timelines.length && Object.keys(finalStyles).length) {
/** @type {?} */
const tl = timelines[timelines.length - 1];
if (!tl.allowOnlyTimelineStyles()) {
tl.setStyles([finalStyles], null, context.errors, options);
}
}
return timelines.length ? timelines.map(timeline => timeline.buildKeyframes()) :
[createTimelineInstruction(rootElement, [], [], [], 0, 0, '', false)];
}
/**
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitTrigger(ast, context) {
// these values are not visited in this AST
}
/**
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitState(ast, context) {
// these values are not visited in this AST
}
/**
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitTransition(ast, context) {
// these values are not visited in this AST
}
/**
 * Plays any previously-recorded sub-animation instructions for the current
 * element, advancing the current timeline past them.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitAnimateChild(ast, context) {
/** @type {?} */
const elementInstructions = context.subInstructions.consume(context.element);
if (elementInstructions) {
/** @type {?} */
const innerContext = context.createSubContext(ast.options);
/** @type {?} */
const startTime = context.currentTimeline.currentTime;
/** @type {?} */
const endTime = this._visitSubInstructions(elementInstructions, innerContext, (/** @type {?} */ (innerContext.options)));
if (startTime != endTime) {
// we do this on the upper context because we created a sub context for
// the sub child animations
context.transformIntoNewTimeline(endTime);
}
}
context.previousNode = ast;
}
/**
 * Plays a referenced animation (useAnimation) in its own sub context and
 * advances the outer timeline to when it finishes.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitAnimateRef(ast, context) {
/** @type {?} */
const innerContext = context.createSubContext(ast.options);
innerContext.transformIntoNewTimeline();
this.visitReference(ast.animation, innerContext);
context.transformIntoNewTimeline(innerContext.currentTimeline.currentTime);
context.previousNode = ast;
}
/**
 * Appends each sub-animation instruction to the timeline and returns the
 * furthest end time reached; a duration of 0 skips the sub animation.
 * @private
 * @param {?} instructions
 * @param {?} context
 * @param {?} options
 * @return {?}
 */
_visitSubInstructions(instructions, context, options) {
/** @type {?} */
const startTime = context.currentTimeline.currentTime;
/** @type {?} */
let furthestTime = startTime;
// this is a special-case for when a user wants to skip a sub
// animation from being fired entirely.
/** @type {?} */
const duration = options.duration != null ? resolveTimingValue(options.duration) : null;
/** @type {?} */
const delay = options.delay != null ? resolveTimingValue(options.delay) : null;
if (duration !== 0) {
instructions.forEach(instruction => {
/** @type {?} */
const instructionTimings = context.appendInstructionToTimeline(instruction, duration, delay);
furthestTime =
Math.max(furthestTime, instructionTimings.duration + instructionTimings.delay);
});
}
return furthestTime;
}
/**
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitReference(ast, context) {
context.updateOptions(ast.options, true);
visitDslNode(this, ast.animation, context);
context.previousNode = ast;
}
/**
 * Visits the steps of a sequence() in order; params/delay options fork a
 * sub context first, and a new timeline is started when any step forked one.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitSequence(ast, context) {
/** @type {?} */
const subContextCount = context.subContextCount;
/** @type {?} */
let ctx = context;
/** @type {?} */
const options = ast.options;
if (options && (options.params || options.delay)) {
ctx = context.createSubContext(options);
ctx.transformIntoNewTimeline();
if (options.delay != null) {
if (ctx.previousNode.type == 6 /* Style */) {
ctx.currentTimeline.snapshotCurrentStyles();
ctx.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
}
/** @type {?} */
const delay = resolveTimingValue(options.delay);
ctx.delayNextStep(delay);
}
}
if (ast.steps.length) {
ast.steps.forEach(s => visitDslNode(this, s, ctx));
// this is here just in case the inner steps only contain or end with a style() call
ctx.currentTimeline.applyStylesToKeyframe();
// this means that some animation function within the sequence
// ended up creating a sub timeline (which means the current
// timeline cannot overlap with the contents of the sequence)
if (ctx.subContextCount > subContextCount) {
ctx.transformIntoNewTimeline();
}
}
context.previousNode = ast;
}
/**
 * Runs the steps of a group() in parallel sub contexts and advances the
 * outer timeline to the furthest end time of the inner timelines.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitGroup(ast, context) {
/** @type {?} */
const innerTimelines = [];
/** @type {?} */
let furthestTime = context.currentTimeline.currentTime;
/** @type {?} */
const delay = ast.options && ast.options.delay ? resolveTimingValue(ast.options.delay) : 0;
ast.steps.forEach(s => {
/** @type {?} */
const innerContext = context.createSubContext(ast.options);
if (delay) {
innerContext.delayNextStep(delay);
}
visitDslNode(this, s, innerContext);
furthestTime = Math.max(furthestTime, innerContext.currentTimeline.currentTime);
innerTimelines.push(innerContext.currentTimeline);
});
// this operation is run after the AST loop because otherwise
// if the parent timeline's collected styles were updated then
// it would pass in invalid data into the new-to-be forked items
innerTimelines.forEach(timeline => context.currentTimeline.mergeTimelineCollectedStyles(timeline));
context.transformIntoNewTimeline(furthestTime);
context.previousNode = ast;
}
/**
 * Resolves a timing AST into concrete duration/delay/easing values,
 * interpolating `{{param}}` substitutions for dynamic timings.
 * @private
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
_visitTiming(ast, context) {
if (((/** @type {?} */ (ast))).dynamic) {
/** @type {?} */
const strValue = ((/** @type {?} */ (ast))).strValue;
/** @type {?} */
const timingValue = context.params ? interpolateParams(strValue, context.params, context.errors) : strValue;
return resolveTiming(timingValue, context.errors);
}
else {
return { duration: ast.duration, delay: ast.delay, easing: ast.easing };
}
}
/**
 * Applies an animate() step: advances time by its duration and records the
 * target styles (or delegates to visitKeyframes for keyframed styles).
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitAnimate(ast, context) {
/** @type {?} */
const timings = context.currentAnimateTimings = this._visitTiming(ast.timings, context);
/** @type {?} */
const timeline = context.currentTimeline;
if (timings.delay) {
context.incrementTime(timings.delay);
timeline.snapshotCurrentStyles();
}
/** @type {?} */
const style$$1 = ast.style;
if (style$$1.type == 5 /* Keyframes */) {
this.visitKeyframes(style$$1, context);
}
else {
context.incrementTime(timings.duration);
this.visitStyle((/** @type {?} */ (style$$1)), context);
timeline.applyStylesToKeyframe();
}
context.currentAnimateTimings = null;
context.previousNode = ast;
}
/**
 * Records a style() step's styles onto the current timeline keyframe.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitStyle(ast, context) {
/** @type {?} */
const timeline = context.currentTimeline;
/** @type {?} */
const timings = (/** @type {?} */ (context.currentAnimateTimings));
// this is a special case for when a style() call
// directly follows an animate() call (but not inside of an animate() call)
if (!timings && timeline.getCurrentStyleProperties().length) {
timeline.forwardFrame();
}
/** @type {?} */
const easing = (timings && timings.easing) || ast.easing;
if (ast.isEmptyStep) {
timeline.applyEmptyStep(easing);
}
else {
timeline.setStyles(ast.styles, easing, context.errors, context.options);
}
context.previousNode = ast;
}
/**
 * Expands a keyframes() step into a sub timeline, one keyframe per style
 * step, positioned by each step's offset within the animate() duration.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitKeyframes(ast, context) {
/** @type {?} */
const currentAnimateTimings = (/** @type {?} */ (context.currentAnimateTimings));
/** @type {?} */
const startTime = ((/** @type {?} */ (context.currentTimeline))).duration;
/** @type {?} */
const duration = currentAnimateTimings.duration;
/** @type {?} */
const innerContext = context.createSubContext();
/** @type {?} */
const innerTimeline = innerContext.currentTimeline;
innerTimeline.easing = currentAnimateTimings.easing;
ast.styles.forEach(step => {
/** @type {?} */
const offset = step.offset || 0;
innerTimeline.forwardTime(offset * duration);
innerTimeline.setStyles(step.styles, step.easing, context.errors, context.options);
innerTimeline.applyStylesToKeyframe();
});
// this will ensure that the parent timeline gets all the styles from
// the child even if the new timeline below is not used
context.currentTimeline.mergeTimelineCollectedStyles(innerTimeline);
// we do this because the window between this timeline and the sub timeline
// should ensure that the styles within are exactly the same as they were before
context.transformIntoNewTimeline(startTime + duration);
context.previousNode = ast;
}
/**
 * Runs a query() step: finds the matching elements, plays the inner
 * animation on each in its own sub context, and advances the outer timeline
 * to the furthest inner end time.
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitQuery(ast, context) {
// in the event that the first step before this is a style step we need
// to ensure the styles are applied before the children are animated
/** @type {?} */
const startTime = context.currentTimeline.currentTime;
/** @type {?} */
const options = (/** @type {?} */ ((ast.options || {})));
/** @type {?} */
const delay = options.delay ? resolveTimingValue(options.delay) : 0;
if (delay && (context.previousNode.type === 6 /* Style */ ||
(startTime == 0 && context.currentTimeline.getCurrentStyleProperties().length))) {
context.currentTimeline.snapshotCurrentStyles();
context.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
}
/** @type {?} */
let furthestTime = startTime;
/** @type {?} */
const elms = context.invokeQuery(ast.selector, ast.originalSelector, ast.limit, ast.includeSelf, options.optional ? true : false, context.errors);
context.currentQueryTotal = elms.length;
/** @type {?} */
let sameElementTimeline = null;
elms.forEach((element, i) => {
context.currentQueryIndex = i;
/** @type {?} */
const innerContext = context.createSubContext(ast.options, element);
if (delay) {
innerContext.delayNextStep(delay);
}
if (element === context.element) {
sameElementTimeline = innerContext.currentTimeline;
}
visitDslNode(this, ast.animation, innerContext);
// this is here just in case the inner steps only contain or end
// with a style() call (which is here to signal that this is a preparatory
// call to style an element before it is animated again)
innerContext.currentTimeline.applyStylesToKeyframe();
/** @type {?} */
const endTime = innerContext.currentTimeline.currentTime;
furthestTime = Math.max(furthestTime, endTime);
});
context.currentQueryIndex = 0;
context.currentQueryTotal = 0;
context.transformIntoNewTimeline(furthestTime);
if (sameElementTimeline) {
context.currentTimeline.mergeTimelineCollectedStyles(sameElementTimeline);
context.currentTimeline.snapshotCurrentStyles();
}
context.previousNode = ast;
}
/**
 * Applies a stagger() step by delaying the current query element's
 * animation according to its index (or in reverse / 'full' mode).
 * @param {?} ast
 * @param {?} context
 * @return {?}
 */
visitStagger(ast, context) {
/** @type {?} */
const parentContext = (/** @type {?} */ (context.parentContext));
/** @type {?} */
const tl = context.currentTimeline;
/** @type {?} */
const timings = ast.timings;
/** @type {?} */
const duration = Math.abs(timings.duration);
/** @type {?} */
const maxTime = duration * (context.currentQueryTotal - 1);
/** @type {?} */
let delay = duration * context.currentQueryIndex;
/** @type {?} */
let staggerTransformer = timings.duration < 0 ? 'reverse' : timings.easing;
switch (staggerTransformer) {
case 'reverse':
delay = maxTime - delay;
break;
case 'full':
delay = parentContext.currentStaggerTime;
break;
}
/** @type {?} */
const timeline = context.currentTimeline;
if (delay) {
timeline.delayNextStep(delay);
}
/** @type {?} */
const startingTime = timeline.currentTime;
visitDslNode(this, ast.animation, context);
context.previousNode = ast;
// time = duration + delay
// the reason why this computation is so complex is because
// the inner timeline may either have a delay value or a stretched
// keyframe depending on if a subtimeline is not used or is used.
parentContext.currentStaggerTime =
(tl.currentTime - startingTime) + (tl.startTime - parentContext.currentTimeline.startTime);
}
}
// sentinel used as `previousNode` when no meaningful previous node exists;
// type/identity comparisons against it always fail for real AST nodes
/** @type {?} */
const DEFAULT_NOOP_PREVIOUS_NODE = (/** @type {?} */ ({}));
/**
 * Per-element traversal state used while building timelines: tracks the
 * current timeline, inherited options/params, query bookkeeping and the
 * full list of timelines produced so far. Sub contexts are forked for
 * group()/query()/keyframes() nodes and share the `timelines` array.
 */
class AnimationTimelineContext {
    /**
     * @param {?} _driver animation driver used to run element queries
     * @param {?} element the element this context animates
     * @param {?} subInstructions previously-built sub-animation instructions
     * @param {?} _enterClassName CSS class substituted for :enter tokens
     * @param {?} _leaveClassName CSS class substituted for :leave tokens
     * @param {?} errors collector for build errors
     * @param {?} timelines shared output list of all timelines
     * @param {?=} initialTimeline timeline to start from (a fresh one is created when omitted)
     */
constructor(_driver, element, subInstructions, _enterClassName, _leaveClassName, errors, timelines, initialTimeline) {
this._driver = _driver;
this.element = element;
this.subInstructions = subInstructions;
this._enterClassName = _enterClassName;
this._leaveClassName = _leaveClassName;
this.errors = errors;
this.timelines = timelines;
this.parentContext = null;
this.currentAnimateTimings = null;
this.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
this.subContextCount = 0;
this.options = {};
this.currentQueryIndex = 0;
this.currentQueryTotal = 0;
this.currentStaggerTime = 0;
this.currentTimeline = initialTimeline || new TimelineBuilder(this._driver, element, 0);
timelines.push(this.currentTimeline);
}
/**
 * @return {?} the substitution params carried by the current options
 */
get params() { return this.options.params; }
/**
 * Merges the given animation options into this context's options.
 * @param {?} options
 * @param {?=} skipIfExists when true, existing params are not overwritten
 * @return {?}
 */
updateOptions(options, skipIfExists) {
if (!options)
return;
/** @type {?} */
const newOptions = (/** @type {?} */ (options));
/** @type {?} */
let optionsToUpdate = this.options;
// NOTE: this will get patched up when other animation methods support duration overrides
if (newOptions.duration != null) {
((/** @type {?} */ (optionsToUpdate))).duration = resolveTimingValue(newOptions.duration);
}
if (newOptions.delay != null) {
optionsToUpdate.delay = resolveTimingValue(newOptions.delay);
}
/** @type {?} */
const newParams = newOptions.params;
if (newParams) {
/** @type {?} */
let paramsToUpdate = (/** @type {?} */ (optionsToUpdate.params));
if (!paramsToUpdate) {
paramsToUpdate = this.options.params = {};
}
Object.keys(newParams).forEach(name => {
if (!skipIfExists || !paramsToUpdate.hasOwnProperty(name)) {
// param values may themselves reference other params ({{x}})
paramsToUpdate[name] = interpolateParams(newParams[name], paramsToUpdate, this.errors);
}
});
}
}
/**
 * Returns a copy of the options containing only a cloned `params` map, so
 * sub contexts cannot mutate the parent's params.
 * @private
 * @return {?}
 */
_copyOptions() {
/** @type {?} */
const options = {};
if (this.options) {
/** @type {?} */
const oldParams = this.options.params;
if (oldParams) {
/** @type {?} */
const params = options['params'] = {};
Object.keys(oldParams).forEach(name => { params[name] = oldParams[name]; });
}
}
return options;
}
/**
 * Forks a child context (and a forked timeline) for a nested animation
 * step, inheriting options/params and query bookkeeping from this context.
 * @param {?=} options
 * @param {?=} element target element (defaults to this context's element)
 * @param {?=} newTime start time for the forked timeline
 * @return {?} the new sub context
 */
createSubContext(options = null, element, newTime) {
/** @type {?} */
const target = element || this.element;
/** @type {?} */
const context = new AnimationTimelineContext(this._driver, target, this.subInstructions, this._enterClassName, this._leaveClassName, this.errors, this.timelines, this.currentTimeline.fork(target, newTime || 0));
context.previousNode = this.previousNode;
context.currentAnimateTimings = this.currentAnimateTimings;
context.options = this._copyOptions();
context.updateOptions(options);
context.currentQueryIndex = this.currentQueryIndex;
context.currentQueryTotal = this.currentQueryTotal;
context.parentContext = this;
this.subContextCount++;
return context;
}
/**
 * Replaces the current timeline with a fork starting at `newTime` and
 * records it in the shared timelines list.
 * @param {?=} newTime
 * @return {?} the new current timeline
 */
transformIntoNewTimeline(newTime) {
this.previousNode = DEFAULT_NOOP_PREVIOUS_NODE;
this.currentTimeline = this.currentTimeline.fork(this.element, newTime);
this.timelines.push(this.currentTimeline);
return this.currentTimeline;
}
/**
 * Schedules a pre-built sub-animation instruction as its own sub timeline,
 * offset from the current time; returns the timings actually used.
 * @param {?} instruction
 * @param {?} duration override duration (null keeps the instruction's own)
 * @param {?} delay extra delay added on top of the current time
 * @return {?} the updated timings for the appended instruction
 */
appendInstructionToTimeline(instruction, duration, delay) {
/** @type {?} */
const updatedTimings = {
duration: duration != null ? duration : instruction.duration,
delay: this.currentTimeline.currentTime + (delay != null ? delay : 0) + instruction.delay,
easing: ''
};
/** @type {?} */
const builder = new SubTimelineBuilder(this._driver, instruction.element, instruction.keyframes, instruction.preStyleProps, instruction.postStyleProps, updatedTimings, instruction.stretchStartingKeyframe);
this.timelines.push(builder);
return updatedTimings;
}
/**
 * Advances the current timeline by the given number of milliseconds.
 * @param {?} time
 * @return {?}
 */
incrementTime(time) {
this.currentTimeline.forwardTime(this.currentTimeline.duration + time);
}
/**
 * Delays the next step on the current timeline.
 * @param {?} delay
 * @return {?}
 */
delayNextStep(delay) {
// negative delays are not yet supported
if (delay > 0) {
this.currentTimeline.delayNextStep(delay);
}
}
/**
 * Runs a query() selector against this context's element, substituting the
 * :enter/:leave tokens, honoring `limit`, and reporting an error when a
 * non-optional query matches nothing.
 * @param {?} selector
 * @param {?} originalSelector selector as written by the user (for error text)
 * @param {?} limit max matches (negative takes from the end, 0 means no limit)
 * @param {?} includeSelf whether this context's own element is included
 * @param {?} optional when true, zero matches is not an error
 * @param {?} errors
 * @return {?} the matched elements
 */
invokeQuery(selector, originalSelector, limit, includeSelf, optional, errors) {
/** @type {?} */
let results = [];
if (includeSelf) {
results.push(this.element);
}
if (selector.length > 0) { // if :self is only used then the selector is empty
selector = selector.replace(ENTER_TOKEN_REGEX, '.' + this._enterClassName);
selector = selector.replace(LEAVE_TOKEN_REGEX, '.' + this._leaveClassName);
/** @type {?} */
const multi = limit != 1;
/** @type {?} */
let elements = this._driver.query(this.element, selector, multi);
if (limit !== 0) {
elements = limit < 0 ? elements.slice(elements.length + limit, elements.length) :
elements.slice(0, limit);
}
results.push(...elements);
}
if (!optional && results.length == 0) {
errors.push(`\`query("${originalSelector}")\` returned zero elements. (Use \`query("${originalSelector}", { optional: true })\` if you wish to allow this.)`);
}
return results;
}
}
/**
 * Accumulates styles over time for a single element and produces a timeline
 * instruction (a list of keyframes) via `buildKeyframes()`.
 *
 * Keyframes are keyed by their time offset in `_keyframes`; `duration` tracks
 * the current write position. Styles are staged in `_pendingStyles` and
 * flushed into the keyframe at the current offset by `applyStylesToKeyframe`.
 */
class TimelineBuilder {
    /**
     * @param {?} _driver animation driver (used when forking sub-timelines)
     * @param {?} element the element this timeline animates
     * @param {?} startTime absolute start time (ms) of this timeline
     * @param {?=} _elementTimelineStylesLookup shared element -> styles map so
     *     forked timelines for the same element see each other's styles
     */
    constructor(_driver, element, startTime, _elementTimelineStylesLookup) {
        this._driver = _driver;
        this.element = element;
        this.startTime = startTime;
        this._elementTimelineStylesLookup = _elementTimelineStylesLookup;
        this.duration = 0;
        this._previousKeyframe = {};
        this._currentKeyframe = {};
        this._keyframes = new Map();
        this._styleSummary = {};
        this._pendingStyles = {};
        this._backFill = {};
        this._currentEmptyStepKeyframe = null;
        if (!this._elementTimelineStylesLookup) {
            this._elementTimelineStylesLookup = new Map();
        }
        // Local styles inherit from `_backFill` via the prototype chain, so
        // backfilled values show through until explicitly shadowed.
        this._localTimelineStyles = Object.create(this._backFill, {});
        this._globalTimelineStyles = (/** @type {?} */ (this._elementTimelineStylesLookup.get(element)));
        if (!this._globalTimelineStyles) {
            this._globalTimelineStyles = this._localTimelineStyles;
            this._elementTimelineStylesLookup.set(element, this._localTimelineStyles);
        }
        this._loadKeyframe();
    }
    /**
     * Whether this timeline contains anything worth animating.
     * @return {?}
     */
    containsAnimation() {
        switch (this._keyframes.size) {
            case 0:
                return false;
            case 1:
                // A single keyframe only counts if it actually sets styles.
                return this.getCurrentStyleProperties().length > 0;
            default:
                return true;
        }
    }
    /**
     * @return {?} the own property names of the current keyframe
     */
    getCurrentStyleProperties() { return Object.keys(this._currentKeyframe); }
    /**
     * @return {?} absolute time (ms) of the current write position
     */
    get currentTime() { return this.startTime + this.duration; }
    /**
     * @param {?} delay
     * @return {?}
     */
    delayNextStep(delay) {
        // in the event that a style() step is placed right before a stagger()
        // and that style() step is the very first style() value in the animation
        // then we need to make a copy of the keyframe [0, copy, 1] so that the delay
        // properly applies the style() values to work with the stagger...
        /** @type {?} */
        const hasPreStyleStep = this._keyframes.size == 1 && Object.keys(this._pendingStyles).length;
        if (this.duration || hasPreStyleStep) {
            this.forwardTime(this.currentTime + delay);
            if (hasPreStyleStep) {
                this.snapshotCurrentStyles();
            }
        }
        else {
            // Nothing recorded yet: shift the whole timeline instead.
            this.startTime += delay;
        }
    }
    /**
     * Creates a child timeline (for another element) starting at the current
     * time, sharing the element-styles lookup with this one.
     * @param {?} element
     * @param {?=} currentTime
     * @return {?}
     */
    fork(element, currentTime) {
        this.applyStylesToKeyframe();
        return new TimelineBuilder(this._driver, element, currentTime || this.currentTime, this._elementTimelineStylesLookup);
    }
    /**
     * Makes the keyframe at the current `duration` offset the active one,
     * creating it on demand (prototype-chained onto `_backFill`).
     * @private
     * @return {?}
     */
    _loadKeyframe() {
        if (this._currentKeyframe) {
            this._previousKeyframe = this._currentKeyframe;
        }
        this._currentKeyframe = (/** @type {?} */ (this._keyframes.get(this.duration)));
        if (!this._currentKeyframe) {
            this._currentKeyframe = Object.create(this._backFill, {});
            this._keyframes.set(this.duration, this._currentKeyframe);
        }
    }
    /**
     * Advances the write position by a single frame.
     * @return {?}
     */
    forwardFrame() {
        this.duration += ONE_FRAME_IN_MILLISECONDS;
        this._loadKeyframe();
    }
    /**
     * Flushes pending styles, then jumps the write position to `time`.
     * @param {?} time relative time offset (ms) within this timeline
     * @return {?}
     */
    forwardTime(time) {
        this.applyStylesToKeyframe();
        this.duration = time;
        this._loadKeyframe();
    }
    /**
     * Records `prop: value` locally, globally (per element), and in the
     * timestamped summary used by `mergeTimelineCollectedStyles`.
     * @private
     * @param {?} prop
     * @param {?} value
     * @return {?}
     */
    _updateStyle(prop, value) {
        this._localTimelineStyles[prop] = value;
        this._globalTimelineStyles[prop] = value;
        this._styleSummary[prop] = { time: this.currentTime, value };
    }
    /**
     * @return {?} false only while the active keyframe is an "empty step"
     *     created by `applyEmptyStep`
     */
    allowOnlyTimelineStyles() { return this._currentEmptyStepKeyframe !== this._currentKeyframe; }
    /**
     * @param {?} easing
     * @return {?}
     */
    applyEmptyStep(easing) {
        if (easing) {
            this._previousKeyframe['easing'] = easing;
        }
        // special case for animate(duration):
        // all missing styles are filled with a `*` value then
        // if any destination styles are filled in later on the same
        // keyframe then they will override the overridden styles
        // We use `_globalTimelineStyles` here because there may be
        // styles in previous keyframes that are not present in this timeline
        Object.keys(this._globalTimelineStyles).forEach(prop => {
            this._backFill[prop] = this._globalTimelineStyles[prop] || AUTO_STYLE;
            this._currentKeyframe[prop] = AUTO_STYLE;
        });
        this._currentEmptyStepKeyframe = this._currentKeyframe;
    }
    /**
     * Stages the given styles (with params interpolated) as pending for the
     * current keyframe.
     * @param {?} input style tokens ('*' or style objects)
     * @param {?} easing easing recorded on the previous keyframe
     * @param {?} errors collector for interpolation errors
     * @param {?=} options optional `{params}` for interpolation
     * @return {?}
     */
    setStyles(input, easing, errors, options) {
        if (easing) {
            this._previousKeyframe['easing'] = easing;
        }
        /** @type {?} */
        const params = (options && options.params) || {};
        /** @type {?} */
        const styles = flattenStyles(input, this._globalTimelineStyles);
        Object.keys(styles).forEach(prop => {
            /** @type {?} */
            const val = interpolateParams(styles[prop], params, errors);
            this._pendingStyles[prop] = val;
            if (!this._localTimelineStyles.hasOwnProperty(prop)) {
                // First time this timeline touches the prop: seed the backfill
                // with the element's last known value (or `*`).
                this._backFill[prop] = this._globalTimelineStyles.hasOwnProperty(prop) ?
                    this._globalTimelineStyles[prop] :
                    AUTO_STYLE;
            }
            this._updateStyle(prop, val);
        });
    }
    /**
     * Flushes `_pendingStyles` into the current keyframe, then re-applies all
     * locally known styles so the keyframe is self-contained.
     * @return {?}
     */
    applyStylesToKeyframe() {
        /** @type {?} */
        const styles = this._pendingStyles;
        /** @type {?} */
        const props = Object.keys(styles);
        if (props.length == 0)
            return;
        this._pendingStyles = {};
        props.forEach(prop => {
            /** @type {?} */
            const val = styles[prop];
            this._currentKeyframe[prop] = val;
        });
        Object.keys(this._localTimelineStyles).forEach(prop => {
            if (!this._currentKeyframe.hasOwnProperty(prop)) {
                this._currentKeyframe[prop] = this._localTimelineStyles[prop];
            }
        });
    }
    /**
     * Re-stages every locally known style as pending at the current time.
     * @return {?}
     */
    snapshotCurrentStyles() {
        Object.keys(this._localTimelineStyles).forEach(prop => {
            /** @type {?} */
            const val = this._localTimelineStyles[prop];
            this._pendingStyles[prop] = val;
            this._updateStyle(prop, val);
        });
    }
    /**
     * @return {?} the keyframe at the final (current) time offset, if any
     */
    getFinalKeyframe() { return this._keyframes.get(this.duration); }
    /**
     * @return {?} all properties of the current keyframe, including
     *     backfilled ones inherited through the prototype chain
     */
    get properties() {
        /** @type {?} */
        const properties = [];
        // for..in (unlike Object.keys) also picks up backfilled props
        // inherited from `_backFill` via the prototype chain.
        for (let prop in this._currentKeyframe) {
            properties.push(prop);
        }
        return properties;
    }
    /**
     * Adopts styles collected by another timeline when they are newer than
     * (or missing from) this timeline's own summary.
     * @param {?} timeline
     * @return {?}
     */
    mergeTimelineCollectedStyles(timeline) {
        Object.keys(timeline._styleSummary).forEach(prop => {
            /** @type {?} */
            const details0 = this._styleSummary[prop];
            /** @type {?} */
            const details1 = timeline._styleSummary[prop];
            if (!details0 || details1.time > details0.time) {
                this._updateStyle(prop, details1.value);
            }
        });
    }
    /**
     * Converts the collected keyframes into a timeline instruction, computing
     * relative offsets and collecting `!` (pre) / `*` (post) style props.
     * @return {?}
     */
    buildKeyframes() {
        this.applyStylesToKeyframe();
        /** @type {?} */
        const preStyleProps = new Set();
        /** @type {?} */
        const postStyleProps = new Set();
        /** @type {?} */
        const isEmpty = this._keyframes.size === 1 && this.duration === 0;
        /** @type {?} */
        let finalKeyframes = [];
        this._keyframes.forEach((keyframe, time) => {
            /** @type {?} */
            const finalKeyframe = copyStyles(keyframe, true);
            Object.keys(finalKeyframe).forEach(prop => {
                /** @type {?} */
                const value = finalKeyframe[prop];
                if (value == ɵPRE_STYLE) {
                    preStyleProps.add(prop);
                }
                else if (value == AUTO_STYLE) {
                    postStyleProps.add(prop);
                }
            });
            if (!isEmpty) {
                finalKeyframe['offset'] = time / this.duration;
            }
            finalKeyframes.push(finalKeyframe);
        });
        /** @type {?} */
        const preProps = preStyleProps.size ? iteratorToArray(preStyleProps.values()) : [];
        /** @type {?} */
        const postProps = postStyleProps.size ? iteratorToArray(postStyleProps.values()) : [];
        // special case for a 0-second animation (which is designed just to place styles onscreen)
        if (isEmpty) {
            /** @type {?} */
            const kf0 = finalKeyframes[0];
            /** @type {?} */
            const kf1 = copyObj(kf0);
            kf0['offset'] = 0;
            kf1['offset'] = 1;
            finalKeyframes = [kf0, kf1];
        }
        // NOTE(review): `this.easing` is never assigned in this class —
        // presumably set by a subclass or intentionally undefined; confirm.
        return createTimelineInstruction(this.element, finalKeyframes, preProps, postProps, this.duration, this.startTime, this.easing, false);
    }
}
/**
 * A pre-built timeline: wraps an already-computed set of keyframes (e.g.
 * from a reusable animation) and re-times them according to `timings`.
 */
class SubTimelineBuilder extends TimelineBuilder {
    /**
     * @param {?} driver
     * @param {?} element
     * @param {?} keyframes already-built keyframes to replay
     * @param {?} preStyleProps
     * @param {?} postStyleProps
     * @param {?} timings `{duration, delay, easing}` for this sub-timeline
     * @param {?=} _stretchStartingKeyframe when true, the delay is folded into
     *     the keyframes themselves (see `buildKeyframes`)
     */
    constructor(driver, element, keyframes, preStyleProps, postStyleProps, timings, _stretchStartingKeyframe = false) {
        super(driver, element, timings.delay);
        this.element = element;
        this.keyframes = keyframes;
        this.preStyleProps = preStyleProps;
        this.postStyleProps = postStyleProps;
        this._stretchStartingKeyframe = _stretchStartingKeyframe;
        this.timings = { duration: timings.duration, delay: timings.delay, easing: timings.easing };
    }
    /**
     * @return {?} true when there is more than one keyframe to play
     */
    containsAnimation() { return this.keyframes.length > 1; }
    /**
     * @return {?} a timeline instruction for the (possibly re-timed) keyframes
     */
    buildKeyframes() {
        /** @type {?} */
        let keyframes = this.keyframes;
        let { delay, duration, easing } = this.timings;
        if (this._stretchStartingKeyframe && delay) {
            /** @type {?} */
            const newKeyframes = [];
            /** @type {?} */
            const totalTime = duration + delay;
            /** @type {?} */
            const startingGap = delay / totalTime;
            // the original starting keyframe now starts once the delay is done
            /** @type {?} */
            const newFirstKeyframe = copyStyles(keyframes[0], false);
            newFirstKeyframe['offset'] = 0;
            newKeyframes.push(newFirstKeyframe);
            /** @type {?} */
            const oldFirstKeyframe = copyStyles(keyframes[0], false);
            oldFirstKeyframe['offset'] = roundOffset(startingGap);
            newKeyframes.push(oldFirstKeyframe);
            /*
            When the keyframe is stretched then it means that the delay before the animation
            starts is gone. Instead the first keyframe is placed at the start of the animation
            and it is then copied to where it starts when the original delay is over. This basically
            means nothing animates during that delay, but the styles are still rendered. For this
            to work the original offset values that exist in the original keyframes must be "warped"
            so that they can take the new keyframe + delay into account.
            delay=1000, duration=1000, keyframes = 0 .5 1
            turns into
            delay=0, duration=2000, keyframes = 0 .33 .66 1
            */
            // offsets between 1 ... n -1 are all warped by the keyframe stretch
            /** @type {?} */
            const limit = keyframes.length - 1;
            for (let i = 1; i <= limit; i++) {
                /** @type {?} */
                let kf = copyStyles(keyframes[i], false);
                /** @type {?} */
                const oldOffset = (/** @type {?} */ (kf['offset']));
                /** @type {?} */
                const timeAtKeyframe = delay + oldOffset * duration;
                kf['offset'] = roundOffset(timeAtKeyframe / totalTime);
                newKeyframes.push(kf);
            }
            // the new starting keyframe should be added at the start
            duration = totalTime;
            delay = 0;
            easing = '';
            keyframes = newKeyframes;
        }
        return createTimelineInstruction(this.element, keyframes, this.preStyleProps, this.postStyleProps, duration, delay, easing, true);
    }
}
/**
* @param {?} offset
* @param {?=} decimalPoints
* @return {?}
*/
function roundOffset(offset, decimalPoints = 3) {
    // Fix: the previous `Math.pow(10, decimalPoints - 1)` rounded to one
    // fewer decimal place than requested (decimalPoints=3 produced only two
    // decimals, e.g. 1/3 -> 0.33 instead of 0.333).
    /** @type {?} */
    const mult = Math.pow(10, decimalPoints);
    return Math.round(offset * mult) / mult;
}
/**
* @param {?} input
* @param {?} allStyles
* @return {?}
*/
/**
 * Flattens a list of style tokens into a single style object. A `'*'` token
 * expands to every property in `allStyles` with the `*` (AUTO_STYLE) value;
 * any other token is treated as a style object and copied in.
 * @param {?} input list of style tokens ('*' or style objects)
 * @param {?} allStyles full set of known styles used to expand '*'
 * @return {?} the merged style object
 */
function flattenStyles(input, allStyles) {
    /** @type {?} */
    const styles = {};
    /** @type {?} */
    let allProperties;
    for (const token of input) {
        if (token === '*') {
            // Snapshot the property list lazily, only once '*' is first seen.
            if (!allProperties) {
                allProperties = Object.keys(allStyles);
            }
            for (const prop of allProperties) {
                styles[prop] = AUTO_STYLE;
            }
        }
        else {
            copyStyles((/** @type {?} */ (token)), false, styles);
        }
    }
    return styles;
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * A compiled animation: parses and validates animation metadata once up
 * front, then builds concrete timeline instructions on demand.
 */
class Animation {
    /**
     * Parses `input` into an AST; throws if validation produces any errors.
     * @param {?} _driver the animation driver used for AST building
     * @param {?} input raw animation metadata
     */
    constructor(_driver, input) {
        this._driver = _driver;
        /** @type {?} */
        const errors = [];
        /** @type {?} */
        const ast = buildAnimationAst(_driver, input, errors);
        if (errors.length) {
            throw new Error(`animation validation failed:\n${errors.join("\n")}`);
        }
        this._animationAst = ast;
    }
    /**
     * Builds timeline instructions for the element, normalizing the given
     * start/destination styles first; throws if building reports errors.
     * @param {?} element
     * @param {?} startingStyles styles (array or object) at the start
     * @param {?} destinationStyles styles (array or object) at the end
     * @param {?} options animation options (params etc.)
     * @param {?=} subInstructions optional pre-built sub-instruction map
     * @return {?} the built timeline instructions
     */
    buildTimelines(element, startingStyles, destinationStyles, options, subInstructions) {
        // Arrays of style objects are collapsed into a single object first.
        const normalize = (styles) => Array.isArray(styles) ? normalizeStyles(styles) : (/** @type {?} */ (styles));
        /** @type {?} */
        const start = normalize(startingStyles);
        /** @type {?} */
        const dest = normalize(destinationStyles);
        /** @type {?} */
        const errors = [];
        /** @type {?} */
        const instructionMap = subInstructions || new ElementInstructionMap();
        /** @type {?} */
        const result = buildAnimationTimelines(this._driver, element, this._animationAst, ENTER_CLASSNAME, LEAVE_CLASSNAME, start, dest, options, instructionMap, errors);
        if (errors.length) {
            throw new Error(`animation building failed:\n${errors.join("\n")}`);
        }
        return result;
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* \@publicApi
* @abstract
*/
// Abstract base type for style normalizers. Concrete implementations map
// user-authored property names/values into the form the underlying
// animation driver expects (see the normalizer classes defined below).
class AnimationStyleNormalizer {
}
/**
* \@publicApi
*/
/**
 * A normalizer that performs no normalization at all: property names and
 * values pass through unchanged and no errors are ever reported.
 * \@publicApi
 */
class NoopAnimationStyleNormalizer {
    /**
     * Pass-through: returns the property name unchanged.
     * @param {?} propertyName
     * @param {?} errors unused; never appended to
     * @return {?}
     */
    normalizePropertyName(propertyName, errors) {
        return propertyName;
    }
    /**
     * Pass-through: returns the value unchanged.
     * @param {?} userProvidedProperty
     * @param {?} normalizedProperty
     * @param {?} value
     * @param {?} errors unused; never appended to
     * @return {?}
     */
    normalizeStyleValue(userProvidedProperty, normalizedProperty, value, errors) {
        return (/** @type {?} */ (value));
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * Normalizer for the Web Animations API: camel-cases property names and
 * appends a `px` unit to unitless numeric values of dimensional properties.
 */
class WebAnimationsStyleNormalizer extends AnimationStyleNormalizer {
    /**
     * Converts a dash-case property name to camelCase.
     * @param {?} propertyName
     * @param {?} errors unused here
     * @return {?}
     */
    normalizePropertyName(propertyName, errors) {
        return dashCaseToCamelCase(propertyName);
    }
    /**
     * Stringifies the value; for dimensional props, numbers get a `px`
     * suffix and unit-less string values are reported as errors.
     * @param {?} userProvidedProperty original (possibly dash-case) name
     * @param {?} normalizedProperty camelCase name used for the lookup
     * @param {?} value
     * @param {?} errors collector for missing-unit errors
     * @return {?} the normalized string value
     */
    normalizeStyleValue(userProvidedProperty, normalizedProperty, value, errors) {
        /** @type {?} */
        const strVal = value.toString().trim();
        /** @type {?} */
        let unit = '';
        // `0` needs no unit, so it is exempt from the dimensional check.
        const needsUnit = DIMENSIONAL_PROP_MAP[normalizedProperty] && value !== 0 && value !== '0';
        if (needsUnit) {
            if (typeof value === 'number') {
                unit = 'px';
            }
            else {
                /** @type {?} */
                const valAndSuffixMatch = value.match(/^[+-]?[\d\.]+([a-z]*)$/);
                if (valAndSuffixMatch && valAndSuffixMatch[1].length == 0) {
                    errors.push(`Please provide a CSS unit value for ${userProvidedProperty}:${value}`);
                }
            }
        }
        return strVal + unit;
    }
}
/**
 * Lookup of CSS properties that take a dimensional (unit-bearing) value;
 * used by the normalizer above to decide when to append `px`.
 * @type {?}
 */
const DIMENSIONAL_PROP_MAP = makeBooleanMap('width,height,minWidth,minHeight,maxWidth,maxHeight,left,top,bottom,right,fontSize,outlineWidth,outlineOffset,paddingTop,paddingLeft,paddingBottom,paddingRight,marginTop,marginLeft,marginBottom,marginRight,borderRadius,borderWidth,borderTopWidth,borderLeftWidth,borderRightWidth,borderBottomWidth,textIndent,perspective'
    .split(','));
/**
 * Builds an object mapping each key in `keys` to `true`.
 * @param {?} keys
 * @return {?}
 */
function makeBooleanMap(keys) {
    /** @type {?} */
    const map = {};
    for (const key of keys) {
        map[key] = true;
    }
    return map;
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @param {?} element
* @param {?} triggerName
* @param {?} fromState
* @param {?} toState
* @param {?} isRemovalTransition
* @param {?} fromStyles
* @param {?} toStyles
* @param {?} timelines
* @param {?} queriedElements
* @param {?} preStyleProps
* @param {?} postStyleProps
* @param {?} totalTime
* @param {?=} errors
* @return {?}
*/
/**
 * Assembles a transition-animation instruction object from its parts.
 * @param {?} element host element of the transition
 * @param {?} triggerName
 * @param {?} fromState
 * @param {?} toState
 * @param {?} isRemovalTransition true when transitioning to the void state
 * @param {?} fromStyles
 * @param {?} toStyles
 * @param {?} timelines built timeline instructions
 * @param {?} queriedElements elements matched by query() during the build
 * @param {?} preStyleProps
 * @param {?} postStyleProps
 * @param {?} totalTime
 * @param {?=} errors errors collected during the build, if any
 * @return {?} the instruction (type 0 = TransitionAnimation)
 */
function createTransitionInstruction(element, triggerName, fromState, toState, isRemovalTransition, fromStyles, toStyles, timelines, queriedElements, preStyleProps, postStyleProps, totalTime, errors) {
    return Object.assign({ type: 0 /* TransitionAnimation */ }, {
        element, triggerName, isRemovalTransition,
        fromState, fromStyles, toState, toStyles,
        timelines, queriedElements,
        preStyleProps, postStyleProps,
        totalTime, errors
    });
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * Shared empty fallback used when animation options carry no `params`.
 * NOTE(review): a single shared instance that is not frozen — the code below
 * only reads from it / copies it, but confirm no caller mutates it.
 * @type {?}
 */
const EMPTY_OBJECT = {};
/**
 * Builds transition-animation instructions for a single transition
 * definition of a trigger (e.g. `'open => closed'`).
 */
class AnimationTransitionFactory {
    /**
     * @param {?} _triggerName name of the owning trigger
     * @param {?} ast transition AST (matchers + animation steps)
     * @param {?} _stateStyles map of state name -> AnimationStateStyles
     */
    constructor(_triggerName, ast, _stateStyles) {
        this._triggerName = _triggerName;
        this.ast = ast;
        this._stateStyles = _stateStyles;
    }
    /**
     * Whether this transition applies to the given state change.
     * @param {?} currentState
     * @param {?} nextState
     * @param {?} element
     * @param {?} params
     * @return {?}
     */
    match(currentState, nextState, element, params) {
        return oneOrMoreTransitionsMatch(this.ast.matchers, currentState, nextState, element, params);
    }
    /**
     * Builds the concrete styles for a state, falling back to the `*`
     * (wildcard) state's styles when no styler exists for `stateName`.
     * @param {?} stateName
     * @param {?} params interpolation params
     * @param {?} errors collector for interpolation errors
     * @return {?}
     */
    buildStyles(stateName, params, errors) {
        /** @type {?} */
        const backupStateStyler = this._stateStyles['*'];
        /** @type {?} */
        const stateStyler = this._stateStyles[stateName];
        /** @type {?} */
        const backupStyles = backupStateStyler ? backupStateStyler.buildStyles(params, errors) : {};
        return stateStyler ? stateStyler.buildStyles(params, errors) : backupStyles;
    }
    /**
     * Builds the full transition instruction: resolves state styles, builds
     * the timelines (unless `skipAstBuild`), computes the total time and
     * collects pre/post style props per element. On errors, an instruction
     * with empty timelines and the error list is returned instead.
     * @param {?} driver
     * @param {?} element
     * @param {?} currentState
     * @param {?} nextState
     * @param {?} enterClassName
     * @param {?} leaveClassName
     * @param {?=} currentOptions
     * @param {?=} nextOptions
     * @param {?=} subInstructions
     * @param {?=} skipAstBuild when true, no timelines are built
     * @return {?}
     */
    build(driver, element, currentState, nextState, enterClassName, leaveClassName, currentOptions, nextOptions, subInstructions, skipAstBuild) {
        /** @type {?} */
        const errors = [];
        /** @type {?} */
        const transitionAnimationParams = this.ast.options && this.ast.options.params || EMPTY_OBJECT;
        /** @type {?} */
        const currentAnimationParams = currentOptions && currentOptions.params || EMPTY_OBJECT;
        /** @type {?} */
        const currentStateStyles = this.buildStyles(currentState, currentAnimationParams, errors);
        /** @type {?} */
        const nextAnimationParams = nextOptions && nextOptions.params || EMPTY_OBJECT;
        /** @type {?} */
        const nextStateStyles = this.buildStyles(nextState, nextAnimationParams, errors);
        /** @type {?} */
        const queriedElements = new Set();
        /** @type {?} */
        const preStyleMap = new Map();
        /** @type {?} */
        const postStyleMap = new Map();
        /** @type {?} */
        const isRemoval = nextState === 'void';
        // Params merge: next-state options override the transition's own params.
        /** @type {?} */
        const animationOptions = { params: Object.assign({}, transitionAnimationParams, nextAnimationParams) };
        /** @type {?} */
        const timelines = skipAstBuild ? [] : buildAnimationTimelines(driver, element, this.ast.animation, enterClassName, leaveClassName, currentStateStyles, nextStateStyles, animationOptions, subInstructions, errors);
        /** @type {?} */
        let totalTime = 0;
        timelines.forEach(tl => { totalTime = Math.max(tl.duration + tl.delay, totalTime); });
        if (errors.length) {
            return createTransitionInstruction(element, this._triggerName, currentState, nextState, isRemoval, currentStateStyles, nextStateStyles, [], [], preStyleMap, postStyleMap, totalTime, errors);
        }
        timelines.forEach(tl => {
            /** @type {?} */
            const elm = tl.element;
            /** @type {?} */
            const preProps = getOrSetAsInMap(preStyleMap, elm, {});
            tl.preStyleProps.forEach(prop => preProps[prop] = true);
            /** @type {?} */
            const postProps = getOrSetAsInMap(postStyleMap, elm, {});
            tl.postStyleProps.forEach(prop => postProps[prop] = true);
            if (elm !== element) {
                // Any element other than the host was reached via query().
                queriedElements.add(elm);
            }
        });
        /** @type {?} */
        const queriedElementsList = iteratorToArray(queriedElements.values());
        return createTransitionInstruction(element, this._triggerName, currentState, nextState, isRemoval, currentStateStyles, nextStateStyles, timelines, queriedElementsList, preStyleMap, postStyleMap, totalTime);
    }
}
/**
* @param {?} matchFns
* @param {?} currentState
* @param {?} nextState
* @param {?} element
* @param {?} params
* @return {?}
*/
/**
 * Returns true if at least one matcher accepts the given state change.
 * @param {?} matchFns list of transition matcher functions
 * @param {?} currentState
 * @param {?} nextState
 * @param {?} element
 * @param {?} params
 * @return {?}
 */
function oneOrMoreTransitionsMatch(matchFns, currentState, nextState, element, params) {
    for (const fn of matchFns) {
        if (fn(currentState, nextState, element, params)) {
            return true;
        }
    }
    return false;
}
/**
 * The styles defined for a single trigger state, with support for
 * `{{param}}` interpolation against default + caller-supplied params.
 */
class AnimationStateStyles {
    /**
     * @param {?} styles style AST for the state (has a `.styles` array)
     * @param {?} defaultParams default interpolation params for the state
     */
    constructor(styles, defaultParams) {
        this.styles = styles;
        this.defaultParams = defaultParams;
    }
    /**
     * Produces the concrete style object for this state, interpolating any
     * `{{param}}` placeholders. Caller params override the defaults, but
     * null/undefined caller values are ignored.
     * @param {?} params caller-supplied interpolation params
     * @param {?} errors collector for interpolation errors
     * @return {?} the final style object
     */
    buildStyles(params, errors) {
        /** @type {?} */
        const finalStyles = {};
        /** @type {?} */
        const combinedParams = copyObj(this.defaultParams);
        for (const [key, value] of Object.entries(params)) {
            if (value != null) {
                combinedParams[key] = value;
            }
        }
        for (const value of this.styles.styles) {
            // String entries (e.g. '*') carry no concrete styles; skip them.
            if (typeof value === 'string') {
                continue;
            }
            /** @type {?} */
            const styleObj = (/** @type {?} */ (value));
            for (const prop of Object.keys(styleObj)) {
                /** @type {?} */
                let val = styleObj[prop];
                // Only multi-character strings can contain a {{param}} token.
                if (val.length > 1) {
                    val = interpolateParams(val, combinedParams, errors);
                }
                finalStyles[prop] = val;
            }
        }
        return finalStyles;
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* \@publicApi
* @param {?} name
* @param {?} ast
* @return {?}
*/
function buildTrigger(name, ast) {
    // Thin factory over the AnimationTrigger constructor.
    return new AnimationTrigger(name, ast);
}
/**
* \@publicApi
*/
/**
 * Runtime representation of a `trigger()`: holds the per-state stylers, the
 * transition factories, and a fallback transition used when no definition
 * matches a state change.
 * \@publicApi
 */
class AnimationTrigger {
    /**
     * @param {?} name trigger name
     * @param {?} ast trigger AST (states + transitions)
     */
    constructor(name, ast) {
        this.name = name;
        this.ast = ast;
        this.transitionFactories = [];
        this.states = {};
        ast.states.forEach(ast => {
            /** @type {?} */
            const defaultParams = (ast.options && ast.options.params) || {};
            this.states[ast.name] = new AnimationStateStyles(ast.style, defaultParams);
        });
        // Boolean triggers may be written as true/false or 1/0; make sure both
        // spellings resolve to the same state styles.
        balanceProperties(this.states, 'true', '1');
        balanceProperties(this.states, 'false', '0');
        ast.transitions.forEach(ast => {
            this.transitionFactories.push(new AnimationTransitionFactory(name, ast, this.states));
        });
        this.fallbackTransition = createFallbackTransition(name, this.states);
    }
    /**
     * @return {?} whether any transition of this trigger uses query()
     */
    get containsQueries() { return this.ast.queryCount > 0; }
    /**
     * Finds the first transition definition matching the state change.
     * @param {?} currentState
     * @param {?} nextState
     * @param {?} element
     * @param {?} params
     * @return {?} the matching factory, or null when none matches
     */
    matchTransition(currentState, nextState, element, params) {
        /** @type {?} */
        const entry = this.transitionFactories.find(f => f.match(currentState, nextState, element, params));
        return entry || null;
    }
    /**
     * Builds the styles for a state via the fallback transition.
     * @param {?} currentState
     * @param {?} params
     * @param {?} errors
     * @return {?}
     */
    matchStyles(currentState, params, errors) {
        return this.fallbackTransition.buildStyles(currentState, params, errors);
    }
}
/**
* @param {?} triggerName
* @param {?} states
* @return {?}
*/
/**
 * Builds the catch-all transition used when no authored transition matches:
 * it matches every state change and plays an empty sequence (no animation),
 * so state styles are still applied.
 * @param {?} triggerName
 * @param {?} states map of state name -> AnimationStateStyles
 * @return {?} the fallback AnimationTransitionFactory
 */
function createFallbackTransition(triggerName, states) {
    /** @type {?} */
    const transition = {
        type: 1 /* Transition */,
        animation: { type: 2 /* Sequence */, steps: [], options: null },
        matchers: [(fromState, toState) => true],
        options: null,
        queryCount: 0,
        depCount: 0
    };
    return new AnimationTransitionFactory(triggerName, transition, states);
}
/**
* @param {?} obj
* @param {?} key1
* @param {?} key2
* @return {?}
*/
/**
 * Ensures `obj` has both `key1` and `key2` whenever it has either, copying
 * the present value to the missing key. Used to alias the boolean trigger
 * state spellings ('true'/'1', 'false'/'0'). No-op when both or neither
 * key exists.
 * @param {?} obj
 * @param {?} key1
 * @param {?} key2
 * @return {?}
 */
function balanceProperties(obj, key1, key2) {
    /** @type {?} */
    const hasKey1 = obj.hasOwnProperty(key1);
    /** @type {?} */
    const hasKey2 = obj.hasOwnProperty(key2);
    if (hasKey1 && !hasKey2) {
        obj[key2] = obj[key1];
    }
    else if (hasKey2 && !hasKey1) {
        obj[key1] = obj[key2];
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * Shared, always-empty instruction map passed when building standalone
 * (non-transition) animations in `TimelineAnimationEngine.create`.
 * @type {?}
 */
const EMPTY_INSTRUCTION_MAP = new ElementInstructionMap();
/**
 * Engine for programmatic (AnimationBuilder-style) animations: registers
 * animation definitions by id, instantiates players for them, and exposes a
 * string-command interface for controlling those players.
 */
class TimelineAnimationEngine {
    /**
     * @param {?} bodyNode document body (or equivalent root node)
     * @param {?} _driver animation driver that performs the actual animation
     * @param {?} _normalizer style normalizer applied to keyframes
     */
    constructor(bodyNode, _driver, _normalizer) {
        this.bodyNode = bodyNode;
        this._driver = _driver;
        this._normalizer = _normalizer;
        this._animations = {};
        this._playersById = {};
        this.players = [];
    }
    /**
     * Parses and stores an animation definition under `id`.
     * Throws when the metadata fails AST validation.
     * @param {?} id
     * @param {?} metadata
     * @return {?}
     */
    register(id, metadata) {
        /** @type {?} */
        const errors = [];
        /** @type {?} */
        const ast = buildAnimationAst(this._driver, metadata, errors);
        if (errors.length) {
            throw new Error(`Unable to build the animation due to the following errors: ${errors.join("\n")}`);
        }
        else {
            this._animations[id] = ast;
        }
    }
    /**
     * Normalizes an instruction's keyframes and hands them to the driver.
     * @private
     * @param {?} i timeline instruction
     * @param {?} preStyles
     * @param {?=} postStyles
     * @return {?} the driver-created player
     */
    _buildPlayer(i, preStyles, postStyles) {
        /** @type {?} */
        const element = i.element;
        /** @type {?} */
        const keyframes = normalizeKeyframes(this._driver, this._normalizer, element, i.keyframes, preStyles, postStyles);
        // previousPlayers is always empty here; `true` = scrub-capable player.
        return this._driver.animate(element, keyframes, i.duration, i.delay, i.easing, [], true);
    }
    /**
     * Builds a (group) player for the animation registered under `id`,
     * resolving `*` post-styles against computed styles. Throws when the id
     * is unknown or timeline building reports errors.
     * @param {?} id
     * @param {?} element root element to animate
     * @param {?=} options
     * @return {?} the group player controlling all built timelines
     */
    create(id, element, options = {}) {
        /** @type {?} */
        const errors = [];
        /** @type {?} */
        const ast = this._animations[id];
        /** @type {?} */
        let instructions;
        /** @type {?} */
        const autoStylesMap = new Map();
        if (ast) {
            instructions = buildAnimationTimelines(this._driver, element, ast, ENTER_CLASSNAME, LEAVE_CLASSNAME, {}, {}, options, EMPTY_INSTRUCTION_MAP, errors);
            instructions.forEach(inst => {
                /** @type {?} */
                const styles = getOrSetAsInMap(autoStylesMap, inst.element, {});
                // Placeholder `null` values are filled with computed styles below.
                inst.postStyleProps.forEach(prop => styles[prop] = null);
            });
        }
        else {
            errors.push('The requested animation doesn\'t exist or has already been destroyed');
            instructions = [];
        }
        if (errors.length) {
            throw new Error(`Unable to create the animation due to the following errors: ${errors.join("\n")}`);
        }
        autoStylesMap.forEach((styles, element) => {
            Object.keys(styles).forEach(prop => { styles[prop] = this._driver.computeStyle(element, prop, AUTO_STYLE); });
        });
        /** @type {?} */
        const players = instructions.map(i => {
            /** @type {?} */
            const styles = autoStylesMap.get(i.element);
            return this._buildPlayer(i, {}, styles);
        });
        /** @type {?} */
        const player = optimizeGroupPlayer(players);
        this._playersById[id] = player;
        player.onDestroy(() => this.destroy(id));
        this.players.push(player);
        return player;
    }
    /**
     * Destroys the player registered under `id` and removes it from the
     * engine's bookkeeping. Throws when the id has no player.
     * @param {?} id
     * @return {?}
     */
    destroy(id) {
        /** @type {?} */
        const player = this._getPlayer(id);
        player.destroy();
        delete this._playersById[id];
        /** @type {?} */
        const index = this.players.indexOf(player);
        if (index >= 0) {
            this.players.splice(index, 1);
        }
    }
    /**
     * Looks up a player by id, throwing when it does not exist.
     * @private
     * @param {?} id
     * @return {?}
     */
    _getPlayer(id) {
        /** @type {?} */
        const player = this._playersById[id];
        if (!player) {
            throw new Error(`Unable to find the timeline player referenced by ${id}`);
        }
        return player;
    }
    /**
     * Registers a phase callback on the player referenced by `id`.
     * @param {?} id
     * @param {?} element
     * @param {?} eventName phase name, e.g. 'start'/'done'
     * @param {?} callback
     * @return {?} a no-op deregistration function
     */
    listen(id, element, eventName, callback) {
        // triggerName, fromState, toState are all ignored for timeline animations
        /** @type {?} */
        const baseEvent = makeAnimationEvent(element, '', '', '');
        listenOnPlayer(this._getPlayer(id), eventName, baseEvent, callback);
        return () => { };
    }
    /**
     * String-command dispatcher used by the renderer protocol: 'register' and
     * 'create' are handled specially; every other command maps onto the
     * corresponding player method (unknown commands are silently ignored).
     * @param {?} id
     * @param {?} element
     * @param {?} command
     * @param {?} args
     * @return {?}
     */
    command(id, element, command, args) {
        if (command == 'register') {
            this.register(id, (/** @type {?} */ (args[0])));
            return;
        }
        if (command == 'create') {
            /** @type {?} */
            const options = (/** @type {?} */ ((args[0] || {})));
            this.create(id, element, options);
            return;
        }
        /** @type {?} */
        const player = this._getPlayer(id);
        switch (command) {
            case 'play':
                player.play();
                break;
            case 'pause':
                player.pause();
                break;
            case 'reset':
                player.reset();
                break;
            case 'restart':
                player.restart();
                break;
            case 'finish':
                player.finish();
                break;
            case 'init':
                player.init();
                break;
            case 'setPosition':
                player.setPosition(parseFloat((/** @type {?} */ (args[0]))));
                break;
            case 'destroy':
                this.destroy(id);
                break;
        }
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
// CSS class names / selectors the transition engine uses to tag elements
// (queued for animation, animations disabled, structurally inserted).
/** @type {?} */
const QUEUED_CLASSNAME = 'ng-animate-queued';
/** @type {?} */
const QUEUED_SELECTOR = '.ng-animate-queued';
/** @type {?} */
const DISABLED_CLASSNAME = 'ng-animate-disabled';
/** @type {?} */
const DISABLED_SELECTOR = '.ng-animate-disabled';
/** @type {?} */
const STAR_CLASSNAME = 'ng-star-inserted';
/** @type {?} */
const STAR_SELECTOR = '.ng-star-inserted';
/** @type {?} */
const EMPTY_PLAYER_ARRAY = [];
/**
 * Default removal-state record for an element that is NOT being removed.
 * @type {?}
 */
const NULL_REMOVAL_STATE = {
    namespaceId: '',
    setForRemoval: false,
    setForMove: false,
    hasAnimation: false,
    removedBeforeQueried: false
};
/**
 * Removal-state record for a queried element removed before its query ran.
 * @type {?}
 */
const NULL_REMOVED_QUERIED_STATE = {
    namespaceId: '',
    setForMove: false,
    setForRemoval: false,
    hasAnimation: false,
    removedBeforeQueried: true
};
/**
 * Property name under which removal state is stashed on the element itself.
 * @type {?}
 */
const REMOVAL_FLAG = '__ng_removed';
/**
 * Wraps a trigger value, separating the state value itself from any options
 * (e.g. `params`) supplied alongside it via `{value, ...options}`.
 */
class StateValue {
    /**
     * @param {?} input raw trigger value, or an object with a `value` key
     * @param {?=} namespaceId owning namespace id
     */
    constructor(input, namespaceId = '') {
        this.namespaceId = namespaceId;
        /** @type {?} */
        const isObj = input && input.hasOwnProperty('value');
        /** @type {?} */
        const value = isObj ? input['value'] : input;
        this.value = normalizeTriggerValue(value);
        if (isObj) {
            // Everything except `value` is treated as options.
            /** @type {?} */
            const options = copyObj((/** @type {?} */ (input)));
            delete options['value'];
            this.options = (/** @type {?} */ (options));
        }
        else {
            this.options = {};
        }
        if (!this.options.params) {
            this.options.params = {};
        }
    }
    /**
     * @return {?} the params object (always present after construction)
     */
    get params() { return (/** @type {?} */ (this.options.params)); }
    /**
     * Copies params from `options` into this state's params, but only for
     * keys this state has not already set (existing values win).
     * @param {?} options
     * @return {?}
     */
    absorbOptions(options) {
        /** @type {?} */
        const newParams = options.params;
        if (newParams) {
            /** @type {?} */
            const oldParams = (/** @type {?} */ (this.options.params));
            Object.keys(newParams).forEach(prop => {
                if (oldParams[prop] == null) {
                    oldParams[prop] = newParams[prop];
                }
            });
        }
    }
}
/** @type {?} */
const VOID_VALUE = 'void';
/**
 * State assigned to a trigger before its first real value arrives.
 * @type {?}
 */
const DEFAULT_STATE_VALUE = new StateValue(VOID_VALUE);
class AnimationTransitionNamespace {
/**
* @param {?} id
* @param {?} hostElement
* @param {?} _engine
*/
    constructor(id, hostElement, _engine) {
        this.id = id;
        this.hostElement = hostElement;
        this._engine = _engine;
        this.players = [];
        this._triggers = {};
        this._queue = [];
        this._elementListeners = new Map();
        // Tag the host element with a namespace-specific class (ng-tns-<id>).
        this._hostClassName = 'ng-tns-' + id;
        addClass(hostElement, this._hostClassName);
    }
/**
* @param {?} element
* @param {?} name
* @param {?} phase
* @param {?} callback
* @return {?}
*/
    listen(element, name, phase, callback) {
        // Validate the trigger and phase before registering anything.
        if (!this._triggers.hasOwnProperty(name)) {
            throw new Error(`Unable to listen on the animation trigger event "${phase}" because the animation trigger "${name}" doesn\'t exist!`);
        }
        if (phase == null || phase.length == 0) {
            throw new Error(`Unable to listen on the animation trigger "${name}" because the provided event is undefined!`);
        }
        if (!isTriggerEventValid(phase)) {
            throw new Error(`The provided animation trigger event "${phase}" for the animation trigger "${name}" is not supported!`);
        }
        /** @type {?} */
        const listeners = getOrSetAsInMap(this._elementListeners, element, []);
        /** @type {?} */
        const data = { name, phase, callback };
        listeners.push(data);
        // Make sure the element has a state record for this trigger so the
        // engine can match events even before the trigger first fires.
        /** @type {?} */
        const triggersWithStates = getOrSetAsInMap(this._engine.statesByElement, element, {});
        if (!triggersWithStates.hasOwnProperty(name)) {
            addClass(element, NG_TRIGGER_CLASSNAME);
            addClass(element, NG_TRIGGER_CLASSNAME + '-' + name);
            triggersWithStates[name] = DEFAULT_STATE_VALUE;
        }
        // Returns a deregistration function.
        return () => {
            // the event listener is removed AFTER the flush has occurred such
            // that leave animations callbacks can fire (otherwise if the node
            // is removed in between then the listeners would be deregistered)
            this._engine.afterFlush(() => {
                /** @type {?} */
                const index = listeners.indexOf(data);
                if (index >= 0) {
                    listeners.splice(index, 1);
                }
                if (!this._triggers[name]) {
                    delete triggersWithStates[name];
                }
            });
        };
    }
/**
* @param {?} name
* @param {?} ast
* @return {?}
*/
    register(name, ast) {
        if (this._triggers[name]) {
            // Duplicate registration is silently rejected.
            // TODO: consider throwing here instead (original note: "throw").
            return false;
        }
        else {
            this._triggers[name] = ast;
            return true;
        }
    }
/**
* @private
* @param {?} name
* @return {?}
*/
    _getTrigger(name) {
        /** @type {?} */
        const trigger = this._triggers[name];
        if (!trigger) {
            throw new Error(`The provided animation trigger "${name}" has not been registered!`);
        }
        return trigger;
    }
/**
* @param {?} element
* @param {?} triggerName
* @param {?} value
* @param {?=} defaultToFallback
* @return {?}
*/
    trigger(element, triggerName, value, defaultToFallback = true) {
        /** @type {?} */
        const trigger = this._getTrigger(triggerName);
        /** @type {?} */
        const player = new TransitionAnimationPlayer(this.id, triggerName, element);
        /** @type {?} */
        let triggersWithStates = this._engine.statesByElement.get(element);
        if (!triggersWithStates) {
            // First trigger seen on this element: tag it and create the record.
            addClass(element, NG_TRIGGER_CLASSNAME);
            addClass(element, NG_TRIGGER_CLASSNAME + '-' + triggerName);
            this._engine.statesByElement.set(element, triggersWithStates = {});
        }
        /** @type {?} */
        let fromState = triggersWithStates[triggerName];
        /** @type {?} */
        const toState = new StateValue(value, this.id);
        /** @type {?} */
        const isObj = value && value.hasOwnProperty('value');
        if (!isObj && fromState) {
            // Plain values carry no options; inherit params from the old state.
            toState.absorbOptions(fromState.options);
        }
        triggersWithStates[triggerName] = toState;
        if (!fromState) {
            fromState = DEFAULT_STATE_VALUE;
        }
        /** @type {?} */
        const isRemoval = toState.value === VOID_VALUE;
        // normally this isn't reached by here, however, if an object expression
        // is passed in then it may be a new object each time. Comparing the value
        // is important since that will stay the same despite there being a new object.
        // The removal arc here is special cased because the same element is triggered
        // twice in the event that it contains animations on the outer/inner portions
        // of the host container
        if (!isRemoval && fromState.value === toState.value) {
            // this means that despite the value not changing, some inner params
            // have changed which means that the animation final styles need to be applied
            if (!objEquals(fromState.params, toState.params)) {
                /** @type {?} */
                const errors = [];
                /** @type {?} */
                const fromStyles = trigger.matchStyles(fromState.value, fromState.params, errors);
                /** @type {?} */
                const toStyles = trigger.matchStyles(toState.value, toState.params, errors);
                if (errors.length) {
                    this._engine.reportError(errors);
                }
                else {
                    this._engine.afterFlush(() => {
                        eraseStyles(element, fromStyles);
                        setStyles(element, toStyles);
                    });
                }
            }
            return;
        }
        /** @type {?} */
        const playersOnElement = getOrSetAsInMap(this._engine.playersByElement, element, []);
        playersOnElement.forEach(player => {
            // only remove the player if it is queued on the EXACT same trigger/namespace
            // we only also deal with queued players here because if the animation has
            // started then we want to keep the player alive until the flush happens
            // (which is where the previousPlayers are passed into the new player)
            if (player.namespaceId == this.id && player.triggerName == triggerName && player.queued) {
                player.destroy();
            }
        });
        /** @type {?} */
        let transition = trigger.matchTransition(fromState.value, toState.value, element, toState.params);
        /** @type {?} */
        let isFallbackTransition = false;
        if (!transition) {
            if (!defaultToFallback)
                return;
            transition = trigger.fallbackTransition;
            isFallbackTransition = true;
        }
        this._engine.totalQueuedPlayers++;
        this._queue.push({ element, triggerName, transition, fromState, toState, player, isFallbackTransition });
        if (!isFallbackTransition) {
            addClass(element, QUEUED_CLASSNAME);
            player.onStart(() => { removeClass(element, QUEUED_CLASSNAME); });
        }
        player.onDone(() => {
            // Remove the finished player from both the namespace and the
            // per-element player lists.
            /** @type {?} */
            let index = this.players.indexOf(player);
            if (index >= 0) {
                this.players.splice(index, 1);
            }
            /** @type {?} */
            const players = this._engine.playersByElement.get(element);
            if (players) {
                /** @type {?} */
                let index = players.indexOf(player);
                if (index >= 0) {
                    players.splice(index, 1);
                }
            }
        });
        this.players.push(player);
        playersOnElement.push(player);
        return player;
    }
/**
* @param {?} name
* @return {?}
*/
deregister(name) {
delete this._triggers[name];
this._engine.statesByElement.forEach((stateMap, element) => { delete stateMap[name]; });
this._elementListeners.forEach((listeners, element) => {
this._elementListeners.set(element, listeners.filter(entry => { return entry.name != name; }));
});
}
/**
* @param {?} element
* @return {?}
*/
clearElementCache(element) {
this._engine.statesByElement.delete(element);
this._elementListeners.delete(element);
/** @type {?} */
const elementPlayers = this._engine.playersByElement.get(element);
if (elementPlayers) {
elementPlayers.forEach(player => player.destroy());
this._engine.playersByElement.delete(element);
}
}
/**
* @private
* @param {?} rootElement
* @param {?} context
* @param {?=} animate
* @return {?}
*/
_signalRemovalForInnerTriggers(rootElement, context, animate = false) {
// emulate a leave animation for all inner nodes within this node.
// If there are no animations found for any of the nodes then clear the cache
// for the element.
this._engine.driver.query(rootElement, NG_TRIGGER_SELECTOR, true).forEach(elm => {
// this means that an inner remove() operation has already kicked off
// the animation on this element...
if (elm[REMOVAL_FLAG])
return;
/** @type {?} */
const namespaces = this._engine.fetchNamespacesByElement(elm);
if (namespaces.size) {
namespaces.forEach(ns => ns.triggerLeaveAnimation(elm, context, false, true));
}
else {
this.clearElementCache(elm);
}
});
}
/**
* @param {?} element
* @param {?} context
* @param {?=} destroyAfterComplete
* @param {?=} defaultToFallback
* @return {?}
*/
triggerLeaveAnimation(element, context, destroyAfterComplete, defaultToFallback) {
/** @type {?} */
const triggerStates = this._engine.statesByElement.get(element);
if (triggerStates) {
/** @type {?} */
const players = [];
Object.keys(triggerStates).forEach(triggerName => {
// this check is here in the event that an element is removed
// twice (both on the host level and the component level)
if (this._triggers[triggerName]) {
/** @type {?} */
const player = this.trigger(element, triggerName, VOID_VALUE, defaultToFallback);
if (player) {
players.push(player);
}
}
});
if (players.length) {
this._engine.markElementAsRemoved(this.id, element, true, context);
if (destroyAfterComplete) {
optimizeGroupPlayer(players).onDone(() => this._engine.processLeaveNode(element));
}
return true;
}
}
return false;
}
/**
* @param {?} element
* @return {?}
*/
prepareLeaveAnimationListeners(element) {
/** @type {?} */
const listeners = this._elementListeners.get(element);
if (listeners) {
/** @type {?} */
const visitedTriggers = new Set();
listeners.forEach(listener => {
/** @type {?} */
const triggerName = listener.name;
if (visitedTriggers.has(triggerName))
return;
visitedTriggers.add(triggerName);
/** @type {?} */
const trigger = this._triggers[triggerName];
/** @type {?} */
const transition = trigger.fallbackTransition;
/** @type {?} */
const elementStates = (/** @type {?} */ (this._engine.statesByElement.get(element)));
/** @type {?} */
const fromState = elementStates[triggerName] || DEFAULT_STATE_VALUE;
/** @type {?} */
const toState = new StateValue(VOID_VALUE);
/** @type {?} */
const player = new TransitionAnimationPlayer(this.id, triggerName, element);
this._engine.totalQueuedPlayers++;
this._queue.push({
element,
triggerName,
transition,
fromState,
toState,
player,
isFallbackTransition: true
});
});
}
}
/**
 * Handles removal of `element` from the DOM: leave-animates inner triggers,
 * fires this element's own leave animation if one matches, and otherwise
 * either defers removal (when a parent animation may pick it up) or removes
 * it immediately after the next flush.
 * @param {?} element
 * @param {?} context removal context passed through to `onRemovalComplete`
 * @return {?}
 */
removeNode(element, context) {
    const engine = this._engine;
    if (element.childElementCount) {
        this._signalRemovalForInnerTriggers(element, context, true);
    }
    // this means that a * => VOID animation was detected and kicked off
    if (this.triggerLeaveAnimation(element, context, true))
        return;
    // find the player that is animating and make sure that the
    // removal is delayed until that player has completed
    let containsPotentialParentTransition = false;
    if (engine.totalAnimations) {
        const currentPlayers = engine.players.length ? engine.playersByQueriedElement.get(element) : [];
        // when this `if statement` does not continue forward it means that
        // a previous animation query has selected the current element and
        // is animating it. In this situation want to continue forwards and
        // allow the element to be queued up for animation later.
        if (currentPlayers && currentPlayers.length) {
            containsPotentialParentTransition = true;
        }
        else {
            // otherwise walk up the DOM: any ancestor with trigger state may
            // run an animation that queries this element
            let parent = element;
            while (parent = parent.parentNode) {
                const triggers = engine.statesByElement.get(parent);
                if (triggers) {
                    containsPotentialParentTransition = true;
                    break;
                }
            }
        }
    }
    // at this stage we know that the element will either get removed
    // during flush or will be picked up by a parent query. Either way
    // we need to fire the listeners for this element when it DOES get
    // removed (once the query parent animation is done or after flush)
    this.prepareLeaveAnimationListeners(element);
    // whether or not a parent has an animation we need to delay the deferral of the leave
    // operation until we have more information (which we do after flush() has been called)
    if (containsPotentialParentTransition) {
        engine.markElementAsRemoved(this.id, element, false, context);
    }
    else {
        // we do this after the flush has occurred such
        // that the callbacks can be fired
        engine.afterFlush(() => this.clearElementCache(element));
        engine.destroyInnerAnimations(element);
        engine._onRemovalComplete(element, context);
    }
}
/**
* @param {?} element
* @param {?} parent
* @return {?}
*/
insertNode(element, parent) { addClass(element, this._hostClassName); }
/**
 * Empties this namespace's queue: binds registered phase listeners to each
 * queued player, destroys players already marked for destruction (after
 * flush, so listeners still fire), and returns the surviving queue entries
 * sorted so that dependency-free / outer-element transitions run first.
 * @param {?} microtaskId id stamped onto emitted animation events as `_data`
 * @return {?} the sorted list of queue entries to animate
 */
drainQueuedTransitions(microtaskId) {
    const instructions = [];
    this._queue.forEach(entry => {
        const player = entry.player;
        if (player.destroyed)
            return;
        const element = entry.element;
        const listeners = this._elementListeners.get(element);
        if (listeners) {
            listeners.forEach((listener) => {
                if (listener.name == entry.triggerName) {
                    const baseEvent = makeAnimationEvent(element, entry.triggerName, entry.fromState.value, entry.toState.value);
                    ((/** @type {?} */ (baseEvent)))['_data'] = microtaskId;
                    listenOnPlayer(entry.player, listener.phase, baseEvent, listener.callback);
                }
            });
        }
        if (player.markedForDestroy) {
            this._engine.afterFlush(() => {
                // now we can destroy the element properly since the event listeners have
                // been bound to the player
                player.destroy();
            });
        }
        else {
            instructions.push(entry);
        }
    });
    this._queue = [];
    return instructions.sort((a, b) => {
        // if depCount == 0 then move to front
        // otherwise if a contains b then move back
        const d0 = a.transition.ast.depCount;
        const d1 = b.transition.ast.depCount;
        if (d0 == 0 || d1 == 0) {
            return d0 - d1;
        }
        return this._engine.driver.containsElement(a.element, b.element) ? 1 : -1;
    });
}
/**
* @param {?} context
* @return {?}
*/
destroy(context) {
this.players.forEach(p => p.destroy());
this._signalRemovalForInnerTriggers(this.hostElement, context);
}
/**
* @param {?} element
* @return {?}
*/
elementContainsData(element) {
/** @type {?} */
let containsData = false;
if (this._elementListeners.has(element))
containsData = true;
containsData =
(this._queue.find(entry => entry.element === element) ? true : false) || containsData;
return containsData;
}
}
class TransitionAnimationEngine {
/**
 * @param {?} bodyNode document body (used as the containment root)
 * @param {?} driver animation driver that performs DOM queries and playback
 * @param {?} _normalizer style normalizer applied to keyframe styles
 */
constructor(bodyNode, driver, _normalizer) {
    this.bodyNode = bodyNode;
    this.driver = driver;
    this._normalizer = _normalizer;
    // all active transition players across every namespace
    this.players = [];
    // host elements whose namespace placement is deferred until flush
    this.newHostElements = new Map();
    this.playersByElement = new Map();
    // players created by timeline query() matches, keyed by queried element
    this.playersByQueriedElement = new Map();
    // per-element map of triggerName -> StateValue
    this.statesByElement = new Map();
    this.disabledNodes = new Set();
    this.totalAnimations = 0;
    this.totalQueuedPlayers = 0;
    this._namespaceLookup = {};
    // namespaces kept in DOM containment order (see _balanceNamespaceList)
    this._namespaceList = [];
    this._flushFns = [];
    this._whenQuietFns = [];
    this.namespacesByHostElement = new Map();
    this.collectedEnterElements = [];
    this.collectedLeaveElements = [];
    // this method is designed to be overridden by the code that uses this engine
    this.onRemovalComplete = (element, context) => { };
}
/**
* \@internal
* @param {?} element
* @param {?} context
* @return {?}
*/
_onRemovalComplete(element, context) { this.onRemovalComplete(element, context); }
/**
* @return {?}
*/
get queuedPlayers() {
/** @type {?} */
const players = [];
this._namespaceList.forEach(ns => {
ns.players.forEach(player => {
if (player.queued) {
players.push(player);
}
});
});
return players;
}
/**
 * Creates a namespace for `hostElement`, inserting it into the DOM-ordered
 * namespace list immediately when the host is attached, or deferring the
 * insertion to the next flush when it is not yet in the DOM.
 * @param {?} namespaceId
 * @param {?} hostElement
 * @return {?} the newly created namespace
 */
createNamespace(namespaceId, hostElement) {
    const ns = new AnimationTransitionNamespace(namespaceId, hostElement, this);
    if (hostElement.parentNode) {
        this._balanceNamespaceList(ns, hostElement);
    }
    else {
        // defer this later until flush during when the host element has
        // been inserted so that we know exactly where to place it in
        // the namespace list
        this.newHostElements.set(hostElement, ns);
        // given that this host element is apart of the animation code, it
        // may or may not be inserted by a parent node that is an of an
        // animation renderer type. If this happens then we can still have
        // access to this item when we query for :enter nodes. If the parent
        // is a renderer then the set data-structure will normalize the entry
        this.collectEnterElement(hostElement);
    }
    return this._namespaceLookup[namespaceId] = ns;
}
/**
 * Inserts `ns` into `_namespaceList` preserving DOM containment order:
 * it is placed directly after the last existing namespace whose host
 * contains `hostElement`, or at the front when no ancestor namespace exists.
 * Also records the host-element -> namespace mapping.
 * @private
 * @param {?} ns
 * @param {?} hostElement
 * @return {?} the inserted namespace
 */
_balanceNamespaceList(ns, hostElement) {
    const limit = this._namespaceList.length - 1;
    if (limit >= 0) {
        let found = false;
        // scan from the end so the new namespace lands after the deepest
        // containing ancestor already in the list
        for (let i = limit; i >= 0; i--) {
            const nextNamespace = this._namespaceList[i];
            if (this.driver.containsElement(nextNamespace.hostElement, hostElement)) {
                this._namespaceList.splice(i + 1, 0, ns);
                found = true;
                break;
            }
        }
        if (!found) {
            this._namespaceList.splice(0, 0, ns);
        }
    }
    else {
        this._namespaceList.push(ns);
    }
    this.namespacesByHostElement.set(hostElement, ns);
    return ns;
}
/**
* @param {?} namespaceId
* @param {?} hostElement
* @return {?}
*/
register(namespaceId, hostElement) {
/** @type {?} */
let ns = this._namespaceLookup[namespaceId];
if (!ns) {
ns = this.createNamespace(namespaceId, hostElement);
}
return ns;
}
/**
* @param {?} namespaceId
* @param {?} name
* @param {?} trigger
* @return {?}
*/
registerTrigger(namespaceId, name, trigger) {
/** @type {?} */
let ns = this._namespaceLookup[namespaceId];
if (ns && ns.register(name, trigger)) {
this.totalAnimations++;
}
}
/**
 * Destroys a namespace: its bookkeeping entries are removed after the next
 * flush, and the namespace itself (with its leave animations) is destroyed
 * once all animations from that flush have completed.
 * @param {?} namespaceId
 * @param {?} context removal context forwarded to the namespace
 * @return {?}
 */
destroy(namespaceId, context) {
    if (!namespaceId)
        return;
    const ns = this._fetchNamespace(namespaceId);
    this.afterFlush(() => {
        this.namespacesByHostElement.delete(ns.hostElement);
        delete this._namespaceLookup[namespaceId];
        const index = this._namespaceList.indexOf(ns);
        if (index >= 0) {
            this._namespaceList.splice(index, 1);
        }
    });
    // the namespace teardown must wait for its in-flight players
    this.afterFlushAnimationsDone(() => ns.destroy(context));
}
/**
* @private
* @param {?} id
* @return {?}
*/
_fetchNamespace(id) { return this._namespaceLookup[id]; }
/**
* @param {?} element
* @return {?}
*/
fetchNamespacesByElement(element) {
// normally there should only be one namespace per element, however
// if @triggers are placed on both the component element and then
// its host element (within the component code) then there will be
// two namespaces returned. We use a set here to simply the dedupe
// of namespaces incase there are multiple triggers both the elm and host
/** @type {?} */
const namespaces = new Set();
/** @type {?} */
const elementStates = this.statesByElement.get(element);
if (elementStates) {
/** @type {?} */
const keys = Object.keys(elementStates);
for (let i = 0; i < keys.length; i++) {
/** @type {?} */
const nsId = elementStates[keys[i]].namespaceId;
if (nsId) {
/** @type {?} */
const ns = this._fetchNamespace(nsId);
if (ns) {
namespaces.add(ns);
}
}
}
}
return namespaces;
}
/**
* @param {?} namespaceId
* @param {?} element
* @param {?} name
* @param {?} value
* @return {?}
*/
trigger(namespaceId, element, name, value) {
if (isElementNode(element)) {
/** @type {?} */
const ns = this._fetchNamespace(namespaceId);
if (ns) {
ns.trigger(element, name, value);
return true;
}
}
return false;
}
/**
 * Records the insertion of `element`. An element previously flagged for
 * removal is re-classified as a move; the owning namespace (if any) is
 * notified; and elements inserted via `insertBefore` are collected as
 * :enter candidates for the next flush.
 * @param {?} namespaceId owning namespace id ('' when the caller has no animations)
 * @param {?} element
 * @param {?} parent
 * @param {?} insertBefore
 * @return {?}
 */
insertNode(namespaceId, element, parent, insertBefore) {
    if (!isElementNode(element))
        return;
    // special case for when an element is removed and reinserted (move operation)
    // when this occurs we do not want to use the element for deletion later
    const details = (/** @type {?} */ (element[REMOVAL_FLAG]));
    if (details && details.setForRemoval) {
        details.setForRemoval = false;
        details.setForMove = true;
        const index = this.collectedLeaveElements.indexOf(element);
        if (index >= 0) {
            this.collectedLeaveElements.splice(index, 1);
        }
    }
    // in the event that the namespaceId is blank then the caller
    // code does not contain any animation code in it, but it is
    // just being called so that the node is marked as being inserted
    if (namespaceId) {
        const ns = this._fetchNamespace(namespaceId);
        // This if-statement is a workaround for router issue #21947.
        // The router sometimes hits a race condition where while a route
        // is being instantiated a new navigation arrives, triggering leave
        // animation of DOM that has not been fully initialized, until this
        // is resolved, we need to handle the scenario when DOM is not in a
        // consistent state during the animation.
        if (ns) {
            ns.insertNode(element, parent);
        }
    }
    // only *directives and host elements are inserted before
    if (insertBefore) {
        this.collectEnterElement(element);
    }
}
/**
* @param {?} element
* @return {?}
*/
collectEnterElement(element) { this.collectedEnterElements.push(element); }
/**
* @param {?} element
* @param {?} value
* @return {?}
*/
markElementAsDisabled(element, value) {
if (value) {
if (!this.disabledNodes.has(element)) {
this.disabledNodes.add(element);
addClass(element, DISABLED_CLASSNAME);
}
}
else if (this.disabledNodes.has(element)) {
this.disabledNodes.delete(element);
removeClass(element, DISABLED_CLASSNAME);
}
}
/**
* @param {?} namespaceId
* @param {?} element
* @param {?} context
* @return {?}
*/
removeNode(namespaceId, element, context) {
if (!isElementNode(element)) {
this._onRemovalComplete(element, context);
return;
}
/** @type {?} */
const ns = namespaceId ? this._fetchNamespace(namespaceId) : null;
if (ns) {
ns.removeNode(element, context);
}
else {
this.markElementAsRemoved(namespaceId, element, false, context);
}
}
/**
* @param {?} namespaceId
* @param {?} element
* @param {?=} hasAnimation
* @param {?=} context
* @return {?}
*/
markElementAsRemoved(namespaceId, element, hasAnimation, context) {
this.collectedLeaveElements.push(element);
element[REMOVAL_FLAG] = {
namespaceId,
setForRemoval: context, hasAnimation,
removedBeforeQueried: false
};
}
/**
* @param {?} namespaceId
* @param {?} element
* @param {?} name
* @param {?} phase
* @param {?} callback
* @return {?}
*/
listen(namespaceId, element, name, phase, callback) {
if (isElementNode(element)) {
return this._fetchNamespace(namespaceId).listen(element, name, phase, callback);
}
return () => { };
}
/**
* @private
* @param {?} entry
* @param {?} subTimelines
* @param {?} enterClassName
* @param {?} leaveClassName
* @param {?=} skipBuildAst
* @return {?}
*/
_buildInstruction(entry, subTimelines, enterClassName, leaveClassName, skipBuildAst) {
return entry.transition.build(this.driver, entry.element, entry.fromState.value, entry.toState.value, enterClassName, leaveClassName, entry.fromState.options, entry.toState.options, subTimelines, skipBuildAst);
}
/**
* @param {?} containerElement
* @return {?}
*/
destroyInnerAnimations(containerElement) {
/** @type {?} */
let elements = this.driver.query(containerElement, NG_TRIGGER_SELECTOR, true);
elements.forEach(element => this.destroyActiveAnimationsForElement(element));
if (this.playersByQueriedElement.size == 0)
return;
elements = this.driver.query(containerElement, NG_ANIMATING_SELECTOR, true);
elements.forEach(element => this.finishActiveQueriedAnimationOnElement(element));
}
/**
* @param {?} element
* @return {?}
*/
destroyActiveAnimationsForElement(element) {
/** @type {?} */
const players = this.playersByElement.get(element);
if (players) {
players.forEach(player => {
// special case for when an element is set for destruction, but hasn't started.
// in this situation we want to delay the destruction until the flush occurs
// so that any event listeners attached to the player are triggered.
if (player.queued) {
player.markedForDestroy = true;
}
else {
player.destroy();
}
});
}
}
/**
* @param {?} element
* @return {?}
*/
finishActiveQueriedAnimationOnElement(element) {
/** @type {?} */
const players = this.playersByQueriedElement.get(element);
if (players) {
players.forEach(player => player.finish());
}
}
/**
* @return {?}
*/
whenRenderingDone() {
return new Promise(resolve => {
if (this.players.length) {
return optimizeGroupPlayer(this.players).onDone(() => resolve());
}
else {
resolve();
}
});
}
/**
 * Finalizes the removal of an element previously marked via
 * `markElementAsRemoved`: clears its removal flag (so it is not removed
 * twice), tears down inner animations and cached state, fires the removal
 * callback, and re-enables animations on it and any disabled descendants.
 * @param {?} element
 * @return {?}
 */
processLeaveNode(element) {
    const details = (/** @type {?} */ (element[REMOVAL_FLAG]));
    if (details && details.setForRemoval) {
        // this will prevent it from removing it twice
        element[REMOVAL_FLAG] = NULL_REMOVAL_STATE;
        if (details.namespaceId) {
            this.destroyInnerAnimations(element);
            const ns = this._fetchNamespace(details.namespaceId);
            if (ns) {
                ns.clearElementCache(element);
            }
        }
        // setForRemoval doubles as the removal context recorded earlier
        this._onRemovalComplete(element, details.setForRemoval);
    }
    if (this.driver.matchesElement(element, DISABLED_SELECTOR)) {
        this.markElementAsDisabled(element, false);
    }
    this.driver.query(element, DISABLED_SELECTOR, true).forEach(node => {
        this.markElementAsDisabled(node, false);
    });
}
/**
 * Processes everything collected since the last flush: places deferred host
 * namespaces, tags :enter candidates, builds and starts queued animations
 * (or, when there is nothing to animate, directly processes leave nodes),
 * resets the per-flush collections, runs afterFlush callbacks, and settles
 * any whenQuiet callbacks once all started players are done.
 * @param {?=} microtaskId forwarded to drainQueuedTransitions via _flushAnimations
 * @return {?}
 */
flush(microtaskId = -1) {
    let players = [];
    if (this.newHostElements.size) {
        // hosts that were created detached can now be placed in DOM order
        this.newHostElements.forEach((ns, element) => this._balanceNamespaceList(ns, element));
        this.newHostElements.clear();
    }
    if (this.totalAnimations && this.collectedEnterElements.length) {
        for (let i = 0; i < this.collectedEnterElements.length; i++) {
            const elm = this.collectedEnterElements[i];
            addClass(elm, STAR_CLASSNAME);
        }
    }
    if (this._namespaceList.length &&
        (this.totalQueuedPlayers || this.collectedLeaveElements.length)) {
        const cleanupFns = [];
        try {
            players = this._flushAnimations(cleanupFns, microtaskId);
        }
        finally {
            // cleanup fns (class removal, leave-node processing) must run even
            // when _flushAnimations throws
            for (let i = 0; i < cleanupFns.length; i++) {
                cleanupFns[i]();
            }
        }
    }
    else {
        // nothing to animate: remove the collected leave nodes directly
        for (let i = 0; i < this.collectedLeaveElements.length; i++) {
            const element = this.collectedLeaveElements[i];
            this.processLeaveNode(element);
        }
    }
    this.totalQueuedPlayers = 0;
    this.collectedEnterElements.length = 0;
    this.collectedLeaveElements.length = 0;
    this._flushFns.forEach(fn => fn());
    this._flushFns = [];
    if (this._whenQuietFns.length) {
        // we move these over to a variable so that
        // if any new callbacks are registered in another
        // flush they do not populate the existing set
        const quietFns = this._whenQuietFns;
        this._whenQuietFns = [];
        if (players.length) {
            optimizeGroupPlayer(players).onDone(() => { quietFns.forEach(fn => fn()); });
        }
        else {
            quietFns.forEach(fn => fn());
        }
    }
}
/**
* @param {?} errors
* @return {?}
*/
reportError(errors) {
throw new Error(`Unable to process animations due to the following failed trigger transitions\n ${errors.join('\n')}`);
}
/**
* @private
* @param {?} cleanupFns
* @param {?} microtaskId
* @return {?}
*/
_flushAnimations(cleanupFns, microtaskId) {
/** @type {?} */
const subTimelines = new ElementInstructionMap();
/** @type {?} */
const skippedPlayers = [];
/** @type {?} */
const skippedPlayersMap = new Map();
/** @type {?} */
const queuedInstructions = [];
/** @type {?} */
const queriedElements = new Map();
/** @type {?} */
const allPreStyleElements = new Map();
/** @type {?} */
const allPostStyleElements = new Map();
/** @type {?} */
const disabledElementsSet = new Set();
this.disabledNodes.forEach(node => {
disabledElementsSet.add(node);
/** @type {?} */
const nodesThatAreDisabled = this.driver.query(node, QUEUED_SELECTOR, true);
for (let i = 0; i < nodesThatAreDisabled.length; i++) {
disabledElementsSet.add(nodesThatAreDisabled[i]);
}
});
/** @type {?} */
const bodyNode = this.bodyNode;
/** @type {?} */
const allTriggerElements = Array.from(this.statesByElement.keys());
/** @type {?} */
const enterNodeMap = buildRootMap(allTriggerElements, this.collectedEnterElements);
// this must occur before the instructions are built below such that
// the :enter queries match the elements (since the timeline queries
// are fired during instruction building).
/** @type {?} */
const enterNodeMapIds = new Map();
/** @type {?} */
let i = 0;
enterNodeMap.forEach((nodes, root) => {
/** @type {?} */
const className = ENTER_CLASSNAME + i++;
enterNodeMapIds.set(root, className);
nodes.forEach(node => addClass(node, className));
});
/** @type {?} */
const allLeaveNodes = [];
/** @type {?} */
const mergedLeaveNodes = new Set();
/** @type {?} */
const leaveNodesWithoutAnimations = new Set();
for (let i = 0; i < this.collectedLeaveElements.length; i++) {
/** @type {?} */
const element = this.collectedLeaveElements[i];
/** @type {?} */
const details = (/** @type {?} */ (element[REMOVAL_FLAG]));
if (details && details.setForRemoval) {
allLeaveNodes.push(element);
mergedLeaveNodes.add(element);
if (details.hasAnimation) {
this.driver.query(element, STAR_SELECTOR, true).forEach(elm => mergedLeaveNodes.add(elm));
}
else {
leaveNodesWithoutAnimations.add(element);
}
}
}
/** @type {?} */
const leaveNodeMapIds = new Map();
/** @type {?} */
const leaveNodeMap = buildRootMap(allTriggerElements, Array.from(mergedLeaveNodes));
leaveNodeMap.forEach((nodes, root) => {
/** @type {?} */
const className = LEAVE_CLASSNAME + i++;
leaveNodeMapIds.set(root, className);
nodes.forEach(node => addClass(node, className));
});
cleanupFns.push(() => {
enterNodeMap.forEach((nodes, root) => {
/** @type {?} */
const className = (/** @type {?} */ (enterNodeMapIds.get(root)));
nodes.forEach(node => removeClass(node, className));
});
leaveNodeMap.forEach((nodes, root) => {
/** @type {?} */
const className = (/** @type {?} */ (leaveNodeMapIds.get(root)));
nodes.forEach(node => removeClass(node, className));
});
allLeaveNodes.forEach(element => { this.processLeaveNode(element); });
});
/** @type {?} */
const allPlayers = [];
/** @type {?} */
const erroneousTransitions = [];
for (let i = this._namespaceList.length - 1; i >= 0; i--) {
/** @type {?} */
const ns = this._namespaceList[i];
ns.drainQueuedTransitions(microtaskId).forEach(entry => {
/** @type {?} */
const player = entry.player;
/** @type {?} */
const element = entry.element;
allPlayers.push(player);
if (this.collectedEnterElements.length) {
/** @type {?} */
const details = (/** @type {?} */ (element[REMOVAL_FLAG]));
// move animations are currently not supported...
if (details && details.setForMove) {
player.destroy();
return;
}
}
/** @type {?} */
const nodeIsOrphaned = !bodyNode || !this.driver.containsElement(bodyNode, element);
/** @type {?} */
const leaveClassName = (/** @type {?} */ (leaveNodeMapIds.get(element)));
/** @type {?} */
const enterClassName = (/** @type {?} */ (enterNodeMapIds.get(element)));
/** @type {?} */
const instruction = (/** @type {?} */ (this._buildInstruction(entry, subTimelines, enterClassName, leaveClassName, nodeIsOrphaned)));
if (instruction.errors && instruction.errors.length) {
erroneousTransitions.push(instruction);
return;
}
// even though the element may not be apart of the DOM, it may
// still be added at a later point (due to the mechanics of content
// projection and/or dynamic component insertion) therefore it's
// important we still style the element.
if (nodeIsOrphaned) {
player.onStart(() => eraseStyles(element, instruction.fromStyles));
player.onDestroy(() => setStyles(element, instruction.toStyles));
skippedPlayers.push(player);
return;
}
// if a unmatched transition is queued to go then it SHOULD NOT render
// an animation and cancel the previously running animations.
if (entry.isFallbackTransition) {
player.onStart(() => eraseStyles(element, instruction.fromStyles));
player.onDestroy(() => setStyles(element, instruction.toStyles));
skippedPlayers.push(player);
return;
}
// this means that if a parent animation uses this animation as a sub trigger
// then it will instruct the timeline builder to not add a player delay, but
// instead stretch the first keyframe gap up until the animation starts. The
// reason this is important is to prevent extra initialization styles from being
// required by the user in the animation.
instruction.timelines.forEach(tl => tl.stretchStartingKeyframe = true);
subTimelines.append(element, instruction.timelines);
/** @type {?} */
const tuple = { instruction, player, element };
queuedInstructions.push(tuple);
instruction.queriedElements.forEach(element => getOrSetAsInMap(queriedElements, element, []).push(player));
instruction.preStyleProps.forEach((stringMap, element) => {
/** @type {?} */
const props = Object.keys(stringMap);
if (props.length) {
/** @type {?} */
let setVal = (/** @type {?} */ (allPreStyleElements.get(element)));
if (!setVal) {
allPreStyleElements.set(element, setVal = new Set());
}
props.forEach(prop => setVal.add(prop));
}
});
instruction.postStyleProps.forEach((stringMap, element) => {
/** @type {?} */
const props = Object.keys(stringMap);
/** @type {?} */
let setVal = (/** @type {?} */ (allPostStyleElements.get(element)));
if (!setVal) {
allPostStyleElements.set(element, setVal = new Set());
}
props.forEach(prop => setVal.add(prop));
});
});
}
if (erroneousTransitions.length) {
/** @type {?} */
const errors = [];
erroneousTransitions.forEach(instruction => {
errors.push(`@${instruction.triggerName} has failed due to:\n`);
(/** @type {?} */ (instruction.errors)).forEach(error => errors.push(`- ${error}\n`));
});
allPlayers.forEach(player => player.destroy());
this.reportError(errors);
}
/** @type {?} */
const allPreviousPlayersMap = new Map();
// this map works to tell which element in the DOM tree is contained by
// which animation. Further down below this map will get populated once
// the players are built and in doing so it can efficiently figure out
// if a sub player is skipped due to a parent player having priority.
/** @type {?} */
const animationElementMap = new Map();
queuedInstructions.forEach(entry => {
/** @type {?} */
const element = entry.element;
if (subTimelines.has(element)) {
animationElementMap.set(element, element);
this._beforeAnimationBuild(entry.player.namespaceId, entry.instruction, allPreviousPlayersMap);
}
});
skippedPlayers.forEach(player => {
/** @type {?} */
const element = player.element;
/** @type {?} */
const previousPlayers = this._getPreviousPlayers(element, false, player.namespaceId, player.triggerName, null);
previousPlayers.forEach(prevPlayer => {
getOrSetAsInMap(allPreviousPlayersMap, element, []).push(prevPlayer);
prevPlayer.destroy();
});
});
// this is a special case for nodes that will be removed (either by)
// having their own leave animations or by being queried in a container
// that will be removed once a parent animation is complete. The idea
// here is that * styles must be identical to ! styles because of
// backwards compatibility (* is also filled in by default in many places).
// Otherwise * styles will return an empty value or auto since the element
// that is being getComputedStyle'd will not be visible (since * = destination)
/** @type {?} */
const replaceNodes = allLeaveNodes.filter(node => {
return replacePostStylesAsPre(node, allPreStyleElements, allPostStyleElements);
});
// POST STAGE: fill the * styles
/** @type {?} */
const postStylesMap = new Map();
/** @type {?} */
const allLeaveQueriedNodes = cloakAndComputeStyles(postStylesMap, this.driver, leaveNodesWithoutAnimations, allPostStyleElements, AUTO_STYLE);
allLeaveQueriedNodes.forEach(node => {
if (replacePostStylesAsPre(node, allPreStyleElements, allPostStyleElements)) {
replaceNodes.push(node);
}
});
// PRE STAGE: fill the ! styles
/** @type {?} */
const preStylesMap = new Map();
enterNodeMap.forEach((nodes, root) => {
cloakAndComputeStyles(preStylesMap, this.driver, new Set(nodes), allPreStyleElements, ɵPRE_STYLE);
});
replaceNodes.forEach(node => {
/** @type {?} */
const post = postStylesMap.get(node);
/** @type {?} */
const pre = preStylesMap.get(node);
postStylesMap.set(node, (/** @type {?} */ (Object.assign({}, post, pre))));
});
/** @type {?} */
const rootPlayers = [];
/** @type {?} */
const subPlayers = [];
/** @type {?} */
const NO_PARENT_ANIMATION_ELEMENT_DETECTED = {};
queuedInstructions.forEach(entry => {
const { element, player, instruction } = entry;
// this means that it was never consumed by a parent animation which
// means that it is independent and therefore should be set for animation
if (subTimelines.has(element)) {
if (disabledElementsSet.has(element)) {
player.onDestroy(() => setStyles(element, instruction.toStyles));
player.disabled = true;
player.overrideTotalTime(instruction.totalTime);
skippedPlayers.push(player);
return;
}
// this will flow up the DOM and query the map to figure out
// if a parent animation has priority over it. In the situation
// that a parent is detected then it will cancel the loop. If
// nothing is detected, or it takes a few hops to find a parent,
// then it will fill in the missing nodes and signal them as having
// a detected parent (or a NO_PARENT value via a special constant).
/** @type {?} */
let parentWithAnimation = NO_PARENT_ANIMATION_ELEMENT_DETECTED;
if (animationElementMap.size > 1) {
/** @type {?} */
let elm = element;
/** @type {?} */
const parentsToAdd = [];
while (elm = elm.parentNode) {
/** @type {?} */
const detectedParent = animationElementMap.get(elm);
if (detectedParent) {
parentWithAnimation = detectedParent;
break;
}
parentsToAdd.push(elm);
}
parentsToAdd.forEach(parent => animationElementMap.set(parent, parentWithAnimation));
}
/** @type {?} */
const innerPlayer = this._buildAnimation(player.namespaceId, instruction, allPreviousPlayersMap, skippedPlayersMap, preStylesMap, postStylesMap);
player.setRealPlayer(innerPlayer);
if (parentWithAnimation === NO_PARENT_ANIMATION_ELEMENT_DETECTED) {
rootPlayers.push(player);
}
else {
/** @type {?} */
const parentPlayers = this.playersByElement.get(parentWithAnimation);
if (parentPlayers && parentPlayers.length) {
player.parentPlayer = optimizeGroupPlayer(parentPlayers);
}
skippedPlayers.push(player);
}
}
else {
eraseStyles(element, instruction.fromStyles);
player.onDestroy(() => setStyles(element, instruction.toStyles));
// there still might be a ancestor player animating this
// element therefore we will still add it as a sub player
// even if its animation may be disabled
subPlayers.push(player);
if (disabledElementsSet.has(element)) {
skippedPlayers.push(player);
}
}
});
// find all of the sub players' corresponding inner animation player
subPlayers.forEach(player => {
// even if any players are not found for a sub animation then it
// will still complete itself after the next tick since it's Noop
/** @type {?} */
const playersForElement = skippedPlayersMap.get(player.element);
if (playersForElement && playersForElement.length) {
/** @type {?} */
const innerPlayer = optimizeGroupPlayer(playersForElement);
player.setRealPlayer(innerPlayer);
}
});
// the reason why we don't actually play the animation is
// because all that a skipped player is designed to do is to
// fire the start/done transition callback events
skippedPlayers.forEach(player => {
if (player.parentPlayer) {
player.syncPlayerEvents(player.parentPlayer);
}
else {
player.destroy();
}
});
// run through all of the queued removals and see if they
// were picked up by a query. If not then perform the removal
// operation right away unless a parent animation is ongoing.
for (let i = 0; i < allLeaveNodes.length; i++) {
/** @type {?} */
const element = allLeaveNodes[i];
/** @type {?} */
const details = (/** @type {?} */ (element[REMOVAL_FLAG]));
removeClass(element, LEAVE_CLASSNAME);
// this means the element has a removal animation that is being
// taken care of and therefore the inner elements will hang around
// until that animation is over (or the parent queried animation)
if (details && details.hasAnimation)
continue;
/** @type {?} */
let players = [];
// if this element is queried or if it contains queried children
// then we want for the element not to be removed from the page
// until the queried animations have finished
if (queriedElements.size) {
/** @type {?} */
let queriedPlayerResults = queriedElements.get(element);
if (queriedPlayerResults && queriedPlayerResults.length) {
players.push(...queriedPlayerResults);
}
/** @type {?} */
let queriedInnerElements = this.driver.query(element, NG_ANIMATING_SELECTOR, true);
for (let j = 0; j < queriedInnerElements.length; j++) {
/** @type {?} */
let queriedPlayers = queriedElements.get(queriedInnerElements[j]);
if (queriedPlayers && queriedPlayers.length) {
players.push(...queriedPlayers);
}
}
}
/** @type {?} */
const activePlayers = players.filter(p => !p.destroyed);
if (activePlayers.length) {
removeNodesAfterAnimationDone(this, element, activePlayers);
}
else {
this.processLeaveNode(element);
}
}
// this is required so the cleanup method doesn't remove them
allLeaveNodes.length = 0;
rootPlayers.forEach(player => {
this.players.push(player);
player.onDone(() => {
player.destroy();
/** @type {?} */
const index = this.players.indexOf(player);
this.players.splice(index, 1);
});
player.play();
});
return rootPlayers;
}
/**
* @param {?} namespaceId
* @param {?} element
* @return {?}
*/
elementContainsData(namespaceId, element) {
/** @type {?} */
let containsData = false;
/** @type {?} */
const details = (/** @type {?} */ (element[REMOVAL_FLAG]));
if (details && details.setForRemoval)
containsData = true;
if (this.playersByElement.has(element))
containsData = true;
if (this.playersByQueriedElement.has(element))
containsData = true;
if (this.statesByElement.has(element))
containsData = true;
return this._fetchNamespace(namespaceId).elementContainsData(element) || containsData;
}
/**
* @param {?} callback
* @return {?}
*/
afterFlush(callback) { this._flushFns.push(callback); }
/**
* @param {?} callback
* @return {?}
*/
afterFlushAnimationsDone(callback) { this._whenQuietFns.push(callback); }
/**
* @private
* @param {?} element
* @param {?} isQueriedElement
* @param {?=} namespaceId
* @param {?=} triggerName
* @param {?=} toStateValue
* @return {?}
*/
_getPreviousPlayers(element, isQueriedElement, namespaceId, triggerName, toStateValue) {
/** @type {?} */
let players = [];
if (isQueriedElement) {
/** @type {?} */
const queriedElementPlayers = this.playersByQueriedElement.get(element);
if (queriedElementPlayers) {
players = queriedElementPlayers;
}
}
else {
/** @type {?} */
const elementPlayers = this.playersByElement.get(element);
if (elementPlayers) {
/** @type {?} */
const isRemovalAnimation = !toStateValue || toStateValue == VOID_VALUE;
elementPlayers.forEach(player => {
if (player.queued)
return;
if (!isRemovalAnimation && player.triggerName != triggerName)
return;
players.push(player);
});
}
}
if (namespaceId || triggerName) {
players = players.filter(player => {
if (namespaceId && namespaceId != player.namespaceId)
return false;
if (triggerName && triggerName != player.triggerName)
return false;
return true;
});
}
return players;
}
/**
* @private
* @param {?} namespaceId
* @param {?} instruction
* @param {?} allPreviousPlayersMap
* @return {?}
*/
_beforeAnimationBuild(namespaceId, instruction, allPreviousPlayersMap) {
/** @type {?} */
const triggerName = instruction.triggerName;
/** @type {?} */
const rootElement = instruction.element;
// when a removal animation occurs, ALL previous players are collected
// and destroyed (even if they are outside of the current namespace)
/** @type {?} */
const targetNameSpaceId = instruction.isRemovalTransition ? undefined : namespaceId;
/** @type {?} */
const targetTriggerName = instruction.isRemovalTransition ? undefined : triggerName;
for (const timelineInstruction of instruction.timelines) {
/** @type {?} */
const element = timelineInstruction.element;
/** @type {?} */
const isQueriedElement = element !== rootElement;
/** @type {?} */
const players = getOrSetAsInMap(allPreviousPlayersMap, element, []);
/** @type {?} */
const previousPlayers = this._getPreviousPlayers(element, isQueriedElement, targetNameSpaceId, targetTriggerName, instruction.toState);
previousPlayers.forEach(player => {
/** @type {?} */
const realPlayer = (/** @type {?} */ (player.getRealPlayer()));
if (realPlayer.beforeDestroy) {
realPlayer.beforeDestroy();
}
player.destroy();
players.push(player);
});
}
// this needs to be done so that the PRE/POST styles can be
// computed properly without interfering with the previous animation
eraseStyles(rootElement, instruction.fromStyles);
}
    /**
     * Builds the real (driver-backed) players for every timeline in
     * `instruction` and merges them into a single group player.
     *
     * Side effects visible to the rest of the flush pass:
     *  - queried sub-element players are registered in `playersByQueriedElement`
     *  - every consumed element carries NG_ANIMATING_CLASSNAME until destroy
     *  - sub-timeline group players are recorded into `skippedPlayersMap`
     * @private
     * @param {?} namespaceId
     * @param {?} instruction
     * @param {?} allPreviousPlayersMap previous players per element (already destroyed)
     * @param {?} skippedPlayersMap
     * @param {?} preStylesMap computed `!` styles per element
     * @param {?} postStylesMap computed `*` styles per element
     * @return {?} the merged group player for the whole instruction
     */
    _buildAnimation(namespaceId, instruction, allPreviousPlayersMap, skippedPlayersMap, preStylesMap, postStylesMap) {
        /** @type {?} */
        const triggerName = instruction.triggerName;
        /** @type {?} */
        const rootElement = instruction.element;
        // we first run this so that the previous animation player
        // data can be passed into the successive animation players
        /** @type {?} */
        const allQueriedPlayers = [];
        /** @type {?} */
        const allConsumedElements = new Set();
        /** @type {?} */
        const allSubElements = new Set();
        /** @type {?} */
        const allNewPlayers = instruction.timelines.map(timelineInstruction => {
            /** @type {?} */
            const element = timelineInstruction.element;
            allConsumedElements.add(element);
            // FIXME (matsko): make sure to-be-removed animations are removed properly
            /** @type {?} */
            const details = element[REMOVAL_FLAG];
            // an element already removed from the DOM gets a no-op player that
            // still honors the timeline's duration/delay for callback timing
            if (details && details.removedBeforeQueried)
                return new NoopAnimationPlayer(timelineInstruction.duration, timelineInstruction.delay);
            /** @type {?} */
            const isQueriedElement = element !== rootElement;
            // only hand over previous players that animated THIS element
            /** @type {?} */
            const previousPlayers = flattenGroupPlayers((allPreviousPlayersMap.get(element) || EMPTY_PLAYER_ARRAY)
                .map(p => p.getRealPlayer()))
                .filter(p => {
                // the `element` is not apart of the AnimationPlayer definition, but
                // Mock/WebAnimations
                // use the element within their implementation. This will be added in Angular5 to
                // AnimationPlayer
                /** @type {?} */
                const pp = (/** @type {?} */ (p));
                return pp.element ? pp.element === element : false;
            });
            /** @type {?} */
            const preStyles = preStylesMap.get(element);
            /** @type {?} */
            const postStyles = postStylesMap.get(element);
            /** @type {?} */
            const keyframes = normalizeKeyframes(this.driver, this._normalizer, element, timelineInstruction.keyframes, preStyles, postStyles);
            /** @type {?} */
            const player = this._buildPlayer(timelineInstruction, keyframes, previousPlayers);
            // this means that this particular player belongs to a sub trigger. It is
            // important that we match this player up with the corresponding (@trigger.listener)
            if (timelineInstruction.subTimeline && skippedPlayersMap) {
                allSubElements.add(element);
            }
            if (isQueriedElement) {
                // wrap queried players so their events can be routed per-element
                /** @type {?} */
                const wrappedPlayer = new TransitionAnimationPlayer(namespaceId, triggerName, element);
                wrappedPlayer.setRealPlayer(player);
                allQueriedPlayers.push(wrappedPlayer);
            }
            return player;
        });
        allQueriedPlayers.forEach(player => {
            getOrSetAsInMap(this.playersByQueriedElement, player.element, []).push(player);
            player.onDone(() => deleteOrUnsetInMap(this.playersByQueriedElement, player.element, player));
        });
        allConsumedElements.forEach(element => addClass(element, NG_ANIMATING_CLASSNAME));
        /** @type {?} */
        const player = optimizeGroupPlayer(allNewPlayers);
        player.onDestroy(() => {
            allConsumedElements.forEach(element => removeClass(element, NG_ANIMATING_CLASSNAME));
            setStyles(rootElement, instruction.toStyles);
        });
        // this basically makes all of the callbacks for sub element animations
        // be dependent on the upper players for when they finish
        allSubElements.forEach(element => { getOrSetAsInMap(skippedPlayersMap, element, []).push(player); });
        return player;
    }
/**
* @private
* @param {?} instruction
* @param {?} keyframes
* @param {?} previousPlayers
* @return {?}
*/
_buildPlayer(instruction, keyframes, previousPlayers) {
if (keyframes.length > 0) {
return this.driver.animate(instruction.element, keyframes, instruction.duration, instruction.delay, instruction.easing, previousPlayers);
}
// special case for when an empty transition|definition is provided
// ... there is no point in rendering an empty animation
return new NoopAnimationPlayer(instruction.duration, instruction.delay);
}
}
class TransitionAnimationPlayer {
    /**
     * A placeholder player created when a transition is queued; it records
     * callbacks until the real player (built during flush) is attached via
     * `setRealPlayer`, then delegates everything to it.
     * @param {?} namespaceId
     * @param {?} triggerName
     * @param {?} element
     */
    constructor(namespaceId, triggerName, element) {
        this.namespaceId = namespaceId;
        this.triggerName = triggerName;
        this.element = element;
        this._player = new NoopAnimationPlayer();
        this._containsRealPlayer = false;
        this._queuedCallbacks = {};
        this.destroyed = false;
        this.markedForDestroy = false;
        this.disabled = false;
        this.queued = true;
        this.totalTime = 0;
    }
    /**
     * Attaches the real player and replays every callback queued while this
     * wrapper was still pending. Subsequent calls are ignored.
     * @param {?} player
     * @return {?}
     */
    setRealPlayer(player) {
        if (this._containsRealPlayer)
            return;
        this._player = player;
        for (const phase of Object.keys(this._queuedCallbacks)) {
            for (const callback of this._queuedCallbacks[phase]) {
                listenOnPlayer(player, phase, undefined, callback);
            }
        }
        this._queuedCallbacks = {};
        this._containsRealPlayer = true;
        this.overrideTotalTime(player.totalTime);
        this.queued = false;
    }
    /**
     * @return {?} the player currently backing this wrapper
     */
    getRealPlayer() {
        return this._player;
    }
    /**
     * @param {?} totalTime
     * @return {?}
     */
    overrideTotalTime(totalTime) {
        this.totalTime = totalTime;
    }
    /**
     * Mirrors the start/done/destroy events of `player` onto this wrapper.
     * @param {?} player
     * @return {?}
     */
    syncPlayerEvents(player) {
        /** @type {?} */
        const realPlayer = this._player;
        if (realPlayer.triggerCallback) {
            player.onStart(() => realPlayer.triggerCallback('start'));
        }
        player.onDone(() => this.finish());
        player.onDestroy(() => this.destroy());
    }
    /**
     * Remembers `callback` so it can be replayed onto the real player later.
     * @private
     * @param {?} name
     * @param {?} callback
     * @return {?}
     */
    _queueEvent(name, callback) {
        getOrSetAsInMap(this._queuedCallbacks, name, []).push(callback);
    }
    /**
     * @param {?} fn
     * @return {?}
     */
    onDone(fn) {
        if (this.queued) {
            this._queueEvent('done', fn);
        }
        this._player.onDone(fn);
    }
    /**
     * @param {?} fn
     * @return {?}
     */
    onStart(fn) {
        if (this.queued) {
            this._queueEvent('start', fn);
        }
        this._player.onStart(fn);
    }
    /**
     * @param {?} fn
     * @return {?}
     */
    onDestroy(fn) {
        if (this.queued) {
            this._queueEvent('destroy', fn);
        }
        this._player.onDestroy(fn);
    }
    /**
     * @return {?}
     */
    init() {
        this._player.init();
    }
    /**
     * @return {?}
     */
    hasStarted() {
        return this.queued ? false : this._player.hasStarted();
    }
    /**
     * @return {?}
     */
    play() {
        if (!this.queued) {
            this._player.play();
        }
    }
    /**
     * @return {?}
     */
    pause() {
        if (!this.queued) {
            this._player.pause();
        }
    }
    /**
     * @return {?}
     */
    restart() {
        if (!this.queued) {
            this._player.restart();
        }
    }
    /**
     * @return {?}
     */
    finish() {
        this._player.finish();
    }
    /**
     * @return {?}
     */
    destroy() {
        this.destroyed = true;
        this._player.destroy();
    }
    /**
     * @return {?}
     */
    reset() {
        if (!this.queued) {
            this._player.reset();
        }
    }
    /**
     * @param {?} p
     * @return {?}
     */
    setPosition(p) {
        if (!this.queued) {
            this._player.setPosition(p);
        }
    }
    /**
     * @return {?}
     */
    getPosition() {
        if (this.queued) {
            return 0;
        }
        return this._player.getPosition();
    }
    /**
     * \@internal
     * @param {?} phaseName
     * @return {?}
     */
    triggerCallback(phaseName) {
        /** @type {?} */
        const realPlayer = this._player;
        if (realPlayer.triggerCallback) {
            realPlayer.triggerCallback(phaseName);
        }
    }
}
/**
 * Removes `value` from the array stored under `key` in `map` (either a
 * `Map` or a plain object used as a dictionary). When the array becomes
 * empty the key itself is removed from the map.
 * @param {?} map a `Map` or plain-object dictionary of key -> array
 * @param {?} key
 * @param {?} value the entry to remove from the array under `key`
 * @return {?} the array found under `key`, if any
 */
function deleteOrUnsetInMap(map, key, value) {
    /** @type {?} */
    const isMap = map instanceof Map;
    /** @type {?} */
    const currentValues = isMap ? map.get(key) : map[key];
    if (currentValues) {
        if (currentValues.length) {
            /** @type {?} */
            const index = currentValues.indexOf(value);
            // BUGFIX: guard against indexOf() === -1 — splice(-1, 1) would
            // wrongly remove the LAST entry when `value` is not present
            if (index >= 0) {
                currentValues.splice(index, 1);
            }
        }
        if (currentValues.length == 0) {
            if (isMap) {
                map.delete(key);
            }
            else {
                delete map[key];
            }
        }
    }
    return currentValues;
}
/**
 * Maps `undefined` to `null` while leaving every other value — including
 * falsy ones such as `0`, `''` and `false` — untouched.
 * @param {?} value
 * @return {?}
 */
function normalizeTriggerValue(value) {
    // `== null` intentionally matches both null and undefined without also
    // swallowing other falsy values. DO NOT OPTIMIZE.
    if (value == null) {
        return null;
    }
    return value;
}
/**
 * True when `node` is a DOM element (nodeType 1). A falsy input is passed
 * straight through (so callers may receive null/undefined instead of false).
 * @param {?} node
 * @return {?}
 */
function isElementNode(node) {
    return !node ? node : node.nodeType === 1;
}
/**
 * Only 'start' and 'done' are valid trigger event phases.
 * @param {?} eventName
 * @return {?}
 */
function isTriggerEventValid(eventName) {
    if (eventName == 'start') {
        return true;
    }
    return eventName == 'done';
}
/**
 * Overwrites the element's inline `display` style (default: 'none') and
 * returns the previous inline value so it can be restored later.
 * @param {?} element
 * @param {?=} value the display value to apply; defaults to 'none'
 * @return {?} the previous inline display value
 */
function cloakElement(element, value) {
    /** @type {?} */
    const previousDisplay = element.style.display;
    element.style.display = value == null ? 'none' : value;
    return previousDisplay;
}
/**
 * Temporarily hides `elements` (display:none), asks the driver to compute
 * the requested style properties for each entry of `elementPropsMap`, then
 * restores the original display values.
 *
 * Elements whose styles could not be computed (e.g. they were detached when
 * a parent animation removed them) are flagged with
 * NULL_REMOVED_QUERIED_STATE and returned.
 * @param {?} valuesMap output map: element -> {prop: computedValue}
 * @param {?} driver
 * @param {?} elements the set of elements to cloak while computing
 * @param {?} elementPropsMap map of element -> set of style props to compute
 * @param {?} defaultStyle fallback passed to the driver (e.g. AUTO_STYLE)
 * @return {?} the elements whose styles failed to compute
 */
function cloakAndComputeStyles(valuesMap, driver, elements, elementPropsMap, defaultStyle) {
    /** @type {?} */
    const restoreValues = [];
    for (const element of elements) {
        restoreValues.push(cloakElement(element));
    }
    /** @type {?} */
    const failedElements = [];
    elementPropsMap.forEach((properties, element) => {
        /** @type {?} */
        const computed = {};
        properties.forEach(prop => {
            /** @type {?} */
            const value = computed[prop] = driver.computeStyle(element, prop, defaultStyle);
            // an empty result means the element is no longer attached (e.g.
            // removed by a detached parent animation element)
            if (!value || value.length == 0) {
                element[REMOVAL_FLAG] = NULL_REMOVED_QUERIED_STATE;
                failedElements.push(element);
            }
        });
        valuesMap.set(element, computed);
    });
    // Set.forEach provides no index, so track one manually while restoring
    /** @type {?} */
    let restoreIndex = 0;
    for (const element of elements) {
        cloakElement(element, restoreValues[restoreIndex++]);
    }
    return failedElements;
}
/*
  The Angular renderer reports inserted nodes scattered across the whole
  DOM tree; this algorithm groups them under the @trigger root elements
  that own them. Each node is walked upwards until either a known root
  (an entry in `rootMap`) or another inserted node is found; results are
  memoized in `localRootMap` so each ancestor chain is only walked once.
*/
/**
 * @param {?} roots the @trigger root elements
 * @param {?} nodes the inserted (entering) nodes to classify
 * @return {?} Map of root element -> directly-owned inserted nodes
 */
function buildRootMap(roots, nodes) {
    /** @type {?} */
    const rootMap = new Map();
    roots.forEach(root => rootMap.set(root, []));
    if (nodes.length == 0)
        return rootMap;
    // marker meaning "no owning root" (never collides with a real node)
    /** @type {?} */
    const NULL_NODE = 1;
    /** @type {?} */
    const nodeSet = new Set(nodes);
    /** @type {?} */
    const localRootMap = new Map();
    /**
     * Walks up from `node` to its owning root, memoizing along the way.
     * @param {?} node
     * @return {?}
     */
    function getRoot(node) {
        if (!node)
            return NULL_NODE;
        /** @type {?} */
        const memoized = localRootMap.get(node);
        if (memoized)
            return memoized;
        /** @type {?} */
        const parent = node.parentNode;
        /** @type {?} */
        let root;
        if (rootMap.has(parent)) {
            // direct child of a @trigger root (ngIf inside @trigger)
            root = parent;
        }
        else if (nodeSet.has(parent)) {
            // nested within another inserted node (ngIf inside ngIf)
            root = NULL_NODE;
        }
        else {
            // recurse upwards
            root = getRoot(parent);
        }
        localRootMap.set(node, root);
        return root;
    }
    for (const node of nodes) {
        /** @type {?} */
        const owner = getRoot(node);
        if (owner !== NULL_NODE) {
            rootMap.get(owner).push(node);
        }
    }
    return rootMap;
}
/**
 * Property under which class names are cached on elements that have no
 * `classList` (see `addClass`/`removeClass`).
 * @type {?}
 */
const CLASSES_CACHE_KEY = '$$classes';
/**
 * Adds `className` to the element, falling back to a `$$classes` cache
 * object for elements that expose no `classList`.
 * @param {?} element
 * @param {?} className
 * @return {?}
 */
function addClass(element, className) {
    if (element.classList) {
        element.classList.add(className);
        return;
    }
    /** @type {?} */
    let classes = element[CLASSES_CACHE_KEY];
    if (!classes) {
        classes = element[CLASSES_CACHE_KEY] = {};
    }
    classes[className] = true;
}
/**
 * Removes `className` from the element, falling back to the `$$classes`
 * cache object for elements that expose no `classList`.
 * @param {?} element
 * @param {?} className
 * @return {?}
 */
function removeClass(element, className) {
    if (element.classList) {
        element.classList.remove(className);
        return;
    }
    /** @type {?} */
    const classes = element[CLASSES_CACHE_KEY];
    if (classes) {
        delete classes[className];
    }
}
/**
 * Defers removal of `element` until every given player has finished.
 * @param {?} engine
 * @param {?} element
 * @param {?} players
 * @return {?}
 */
function removeNodesAfterAnimationDone(engine, element, players) {
    /** @type {?} */
    const combined = optimizeGroupPlayer(players);
    combined.onDone(() => engine.processLeaveNode(element));
}
/**
 * Expands nested group players into a flat list of leaf players.
 * @param {?} players
 * @return {?}
 */
function flattenGroupPlayers(players) {
    /** @type {?} */
    const flattened = [];
    _flattenGroupPlayersRecur(players, flattened);
    return flattened;
}
/**
 * Appends every non-group player reachable from `players` onto
 * `finalPlayers`, descending into group players recursively.
 * @param {?} players
 * @param {?} finalPlayers the output accumulator
 * @return {?}
 */
function _flattenGroupPlayersRecur(players, finalPlayers) {
    for (const player of players) {
        if (player instanceof ɵAnimationGroupPlayer) {
            _flattenGroupPlayersRecur(player.players, finalPlayers);
        }
        else {
            finalPlayers.push(player);
        }
    }
}
/**
 * Shallow equality of two plain objects: same own keys, strictly equal
 * values.
 * @param {?} a
 * @param {?} b
 * @return {?}
 */
function objEquals(a, b) {
    /** @type {?} */
    const aKeys = Object.keys(a);
    if (aKeys.length !== Object.keys(b).length) {
        return false;
    }
    return aKeys.every(prop => b.hasOwnProperty(prop) && a[prop] === b[prop]);
}
/**
 * Moves the element's post-style (`*`) entry into the pre-style (`!`) map,
 * merging it with any pre-style entry that already exists.
 * @param {?} element
 * @param {?} allPreStyleElements Map of element -> Set of props
 * @param {?} allPostStyleElements Map of element -> Set of props
 * @return {?} true when an entry was moved
 */
function replacePostStylesAsPre(element, allPreStyleElements, allPostStyleElements) {
    /** @type {?} */
    const postEntry = allPostStyleElements.get(element);
    if (!postEntry)
        return false;
    /** @type {?} */
    const preEntry = allPreStyleElements.get(element);
    if (preEntry) {
        postEntry.forEach(prop => preEntry.add(prop));
    }
    else {
        allPreStyleElements.set(element, postEntry);
    }
    allPostStyleElements.delete(element);
    return true;
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
class AnimationEngine {
    /**
     * Facade that routes animation work to either the transition engine
     * (`@trigger` bindings) or the timeline engine (programmatic `@@id`
     * commands).
     * @param {?} bodyNode
     * @param {?} _driver
     * @param {?} normalizer
     */
    constructor(bodyNode, _driver, normalizer) {
        this.bodyNode = bodyNode;
        this._driver = _driver;
        this._triggerCache = {};
        // this method is designed to be overridden by the code that uses this engine
        this.onRemovalComplete = (element, context) => { };
        this._transitionEngine = new TransitionAnimationEngine(bodyNode, _driver, normalizer);
        this._timelineEngine = new TimelineAnimationEngine(bodyNode, _driver, normalizer);
        this._transitionEngine.onRemovalComplete = (element, context) => this.onRemovalComplete(element, context);
    }
    /**
     * Builds the trigger named `name` (cached per component) and registers
     * it with the transition engine.
     * @param {?} componentId
     * @param {?} namespaceId
     * @param {?} hostElement
     * @param {?} name
     * @param {?} metadata
     * @return {?}
     */
    registerTrigger(componentId, namespaceId, hostElement, name, metadata) {
        /** @type {?} */
        const cacheKey = componentId + '-' + name;
        /** @type {?} */
        let trigger = this._triggerCache[cacheKey];
        if (!trigger) {
            /** @type {?} */
            const errors = [];
            /** @type {?} */
            const ast = buildAnimationAst(this._driver, metadata, errors);
            if (errors.length) {
                throw new Error(`The animation trigger "${name}" has failed to build due to the following errors:\n - ${errors.join("\n - ")}`);
            }
            trigger = buildTrigger(name, ast);
            this._triggerCache[cacheKey] = trigger;
        }
        this._transitionEngine.registerTrigger(namespaceId, name, trigger);
    }
    /**
     * @param {?} namespaceId
     * @param {?} hostElement
     * @return {?}
     */
    register(namespaceId, hostElement) {
        this._transitionEngine.register(namespaceId, hostElement);
    }
    /**
     * @param {?} namespaceId
     * @param {?} context
     * @return {?}
     */
    destroy(namespaceId, context) {
        this._transitionEngine.destroy(namespaceId, context);
    }
    /**
     * @param {?} namespaceId
     * @param {?} element
     * @param {?} parent
     * @param {?} insertBefore
     * @return {?}
     */
    onInsert(namespaceId, element, parent, insertBefore) {
        this._transitionEngine.insertNode(namespaceId, element, parent, insertBefore);
    }
    /**
     * @param {?} namespaceId
     * @param {?} element
     * @param {?} context
     * @return {?}
     */
    onRemove(namespaceId, element, context) {
        this._transitionEngine.removeNode(namespaceId, element, context);
    }
    /**
     * @param {?} element
     * @param {?} disable
     * @return {?}
     */
    disableAnimations(element, disable) {
        this._transitionEngine.markElementAsDisabled(element, disable);
    }
    /**
     * Routes a property binding: `@@id:cmd` values go to the timeline
     * engine, everything else is a trigger state change.
     * @param {?} namespaceId
     * @param {?} element
     * @param {?} property
     * @param {?} value
     * @return {?}
     */
    process(namespaceId, element, property, value) {
        if (property.charAt(0) != '@') {
            this._transitionEngine.trigger(namespaceId, element, property, value);
            return;
        }
        const [id, action] = parseTimelineCommand(property);
        this._timelineEngine.command(id, element, action, value);
    }
    /**
     * @param {?} namespaceId
     * @param {?} element
     * @param {?} eventName
     * @param {?} eventPhase
     * @param {?} callback
     * @return {?}
     */
    listen(namespaceId, element, eventName, eventPhase, callback) {
        // timeline listeners are addressed with a leading `@` (i.e. `@@id`)
        if (eventName.charAt(0) == '@') {
            const [id, action] = parseTimelineCommand(eventName);
            return this._timelineEngine.listen(id, element, action, callback);
        }
        return this._transitionEngine.listen(namespaceId, element, eventName, eventPhase, callback);
    }
    /**
     * @param {?=} microtaskId
     * @return {?}
     */
    flush(microtaskId = -1) {
        this._transitionEngine.flush(microtaskId);
    }
    /**
     * @return {?} players from both engines
     */
    get players() {
        return this._transitionEngine.players.concat(this._timelineEngine.players);
    }
    /**
     * @return {?}
     */
    whenRenderingDone() {
        return this._transitionEngine.whenRenderingDone();
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
 * Detects non-animateable ("special cased") styles inside `styles` and,
 * when present, wraps them in a `SpecialCasedStyles` helper.
 *
 * In CSS there exist properties that cannot be animated within a keyframe
 * animation (whether via CSS keyframes or web-animations); the animation
 * implementation will ignore them, so they are applied separately at the
 * start and end of the animation instead.
 *
 * @param {?} element
 * @param {?} styles a single style object or a keyframe array
 * @return {?} a `SpecialCasedStyles` instance, or `null` when none apply
 */
function packageNonAnimatableStyles(element, styles) {
    /** @type {?} */
    let startStyles = null;
    /** @type {?} */
    let endStyles = null;
    if (Array.isArray(styles) && styles.length) {
        // keyframes: the first frame supplies the start styles, the last
        // frame (when distinct) supplies the end styles
        startStyles = filterNonAnimatableStyles(styles[0]);
        if (styles.length > 1) {
            endStyles = filterNonAnimatableStyles(styles[styles.length - 1]);
        }
    }
    else if (styles) {
        startStyles = filterNonAnimatableStyles(styles);
    }
    if (startStyles || endStyles) {
        return new SpecialCasedStyles(element, startStyles, endStyles);
    }
    return null;
}
/**
 * Applies special-cased (non animateable) styles around a keyframe-based
 * animation.
 *
 * When started (when the `start()` method is run) then the provided
 * `startStyles` will be applied. When finished (when the `finish()` method
 * is called) the `endStyles` will be applied as well as any starting
 * styles. Finally when `destroy()` is called then all styles will be
 * removed.
 */
class SpecialCasedStyles {
    /**
     * @param {?} _element
     * @param {?} _startStyles styles applied when the animation starts (or null)
     * @param {?} _endStyles styles applied when the animation finishes (or null)
     */
    constructor(_element, _startStyles, _endStyles) {
        this._element = _element;
        this._startStyles = _startStyles;
        this._endStyles = _endStyles;
        this._state = 0 /* Pending */;
        /** @type {?} */
        let initialStyles = SpecialCasedStyles.initialStylesByElement.get(_element);
        if (!initialStyles) {
            // share one snapshot of the element's initial styles across all
            // instances created for the same element
            SpecialCasedStyles.initialStylesByElement.set(_element, initialStyles = {});
        }
        this._initialStyles = initialStyles;
    }
    /**
     * Applies the start styles (idempotent).
     * @return {?}
     */
    start() {
        if (this._state < 1 /* Started */) {
            if (this._startStyles) {
                setStyles(this._element, this._startStyles, this._initialStyles);
            }
            this._state = 1 /* Started */;
        }
    }
    /**
     * Restores the element's initial styles and applies the end styles
     * (idempotent).
     * @return {?}
     */
    finish() {
        this.start();
        if (this._state < 2 /* Finished */) {
            setStyles(this._element, this._initialStyles);
            if (this._endStyles) {
                setStyles(this._element, this._endStyles);
                this._endStyles = null;
            }
            // BUGFIX: previously this was set to 1 (Started), which defeated
            // the `< Finished` guard above and re-applied the styles on every
            // subsequent finish() call
            this._state = 2 /* Finished */;
        }
    }
    /**
     * Erases all applied styles and restores the initial ones (idempotent).
     * @return {?}
     */
    destroy() {
        this.finish();
        if (this._state < 3 /* Destroyed */) {
            SpecialCasedStyles.initialStylesByElement.delete(this._element);
            if (this._startStyles) {
                eraseStyles(this._element, this._startStyles);
                // note: _endStyles is already null at this point (cleared by
                // finish() above), so this assignment is defensive only
                this._endStyles = null;
            }
            if (this._endStyles) {
                eraseStyles(this._element, this._endStyles);
                this._endStyles = null;
            }
            setStyles(this._element, this._initialStyles);
            this._state = 3 /* Destroyed */;
        }
    }
}
SpecialCasedStyles.initialStylesByElement = new WeakMap();
/**
 * Extracts the non-animateable entries from a style object.
 * @param {?} styles
 * @return {?} an object holding only the special-cased props, or null if none
 */
function filterNonAnimatableStyles(styles) {
    /** @type {?} */
    let result = null;
    for (const prop of Object.keys(styles)) {
        if (isNonAnimatableStyle(prop)) {
            result = result || {};
            result[prop] = styles[prop];
        }
    }
    return result;
}
/**
 * Whether `prop` cannot be animated inside keyframes and must instead be
 * applied via `SpecialCasedStyles`.
 * @param {?} prop
 * @return {?}
 */
function isNonAnimatableStyle(prop) {
    switch (prop) {
        case 'display':
        case 'position':
            return true;
        default:
            return false;
    }
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
* @type {?}
*/
const ELAPSED_TIME_MAX_DECIMAL_PLACES = 3;
/** @type {?} */
const ANIMATION_PROP = 'animation';
/** @type {?} */
const ANIMATIONEND_EVENT = 'animationend';
/** @type {?} */
const ONE_SECOND$1 = 1000;
class ElementAnimationStyleHandler {
    /**
     * Drives a single CSS keyframe animation on an element and reports
     * completion through `_onDoneFn` once the matching `animationend`
     * event arrives.
     * @param {?} _element
     * @param {?} _name the keyframe animation name
     * @param {?} _duration in milliseconds
     * @param {?} _delay in milliseconds
     * @param {?} _easing
     * @param {?} _fillMode
     * @param {?} _onDoneFn invoked exactly once when the animation finishes
     */
    constructor(_element, _name, _duration, _delay, _easing, _fillMode, _onDoneFn) {
        this._element = _element;
        this._name = _name;
        this._duration = _duration;
        this._delay = _delay;
        this._easing = _easing;
        this._fillMode = _fillMode;
        this._onDoneFn = _onDoneFn;
        this._finished = false;
        this._destroyed = false;
        this._startTime = 0;
        this._position = 0;
        this._eventFn = (e) => this._handleCallback(e);
    }
    /**
     * Appends this animation to the element's `animation` style and starts
     * listening for its `animationend` event.
     * @return {?}
     */
    apply() {
        applyKeyframeAnimation(this._element, `${this._duration}ms ${this._easing} ${this._delay}ms 1 normal ${this._fillMode} ${this._name}`);
        addRemoveAnimationEvent(this._element, this._eventFn, false);
        this._startTime = Date.now();
    }
    /**
     * @return {?}
     */
    pause() {
        playPauseAnimation(this._element, this._name, 'paused');
    }
    /**
     * @return {?}
     */
    resume() {
        playPauseAnimation(this._element, this._name, 'running');
    }
    /**
     * Seeks by applying a negative animation-delay for this entry.
     * @param {?} position fraction of the duration
     * @return {?}
     */
    setPosition(position) {
        /** @type {?} */
        const index = findIndexForAnimation(this._element, this._name);
        this._position = position * this._duration;
        setAnimationStyle(this._element, 'Delay', `-${this._position}ms`, index);
    }
    /**
     * @return {?} the current position in milliseconds
     */
    getPosition() {
        return this._position;
    }
    /**
     * Filters `animationend` events: only an event carrying this animation's
     * name, arriving after the delay has elapsed and covering the full
     * duration, finishes the handler.
     * @private
     * @param {?} event
     * @return {?}
     */
    _handleCallback(event) {
        // tests may stamp a manual timestamp onto the event
        /** @type {?} */
        const timestamp = event._ngTestManualTimestamp || Date.now();
        // elapsedTime is reported in (fractional) seconds — convert to ms
        /** @type {?} */
        const elapsedTime = parseFloat(event.elapsedTime.toFixed(ELAPSED_TIME_MAX_DECIMAL_PLACES)) * ONE_SECOND$1;
        if (event.animationName == this._name &&
            Math.max(timestamp - this._startTime, 0) >= this._delay && elapsedTime >= this._duration) {
            this.finish();
        }
    }
    /**
     * Marks the animation as finished (idempotent), invokes the done
     * callback and detaches the event listener.
     * @return {?}
     */
    finish() {
        if (this._finished)
            return;
        this._finished = true;
        this._onDoneFn();
        addRemoveAnimationEvent(this._element, this._eventFn, true);
    }
    /**
     * Finishes (if needed) and removes this animation entry from the
     * element's inline style (idempotent).
     * @return {?}
     */
    destroy() {
        if (this._destroyed)
            return;
        this._destroyed = true;
        this.finish();
        removeKeyframeAnimation(this._element, this._name);
    }
}
/**
 * Sets the `animationPlayState` of the entry matching `name` on `element`.
 * @param {?} element
 * @param {?} name
 * @param {?} status 'paused' or 'running'
 * @return {?}
 */
function playPauseAnimation(element, name, status) {
    setAnimationStyle(element, 'PlayState', status, findIndexForAnimation(element, name));
}
/**
 * Appends `value` to the element's inline `animation` style (preserving any
 * animations already present) and returns the index of the new entry.
 * @param {?} element
 * @param {?} value a complete `animation` shorthand value
 * @return {?} the comma-separated index of the appended entry
 */
function applyKeyframeAnimation(element, value) {
    /** @type {?} */
    const existing = getAnimationStyle(element, '').trim();
    /** @type {?} */
    let index = 0;
    if (existing.length) {
        index = countChars(existing, ',') + 1;
        value = `${existing}, ${value}`;
    }
    setAnimationStyle(element, '', value);
    return index;
}
/**
 * Removes the animation entry containing `name` from the element's inline
 * `animation` style, leaving any other entries in place.
 * @param {?} element
 * @param {?} name
 * @return {?}
 */
function removeKeyframeAnimation(element, name) {
    /** @type {?} */
    const anim = getAnimationStyle(element, '');
    /** @type {?} */
    const tokens = anim.split(',');
    /** @type {?} */
    const index = findMatchingTokenIndex(tokens, name);
    if (index < 0)
        return;
    tokens.splice(index, 1);
    setAnimationStyle(element, '', tokens.join(','));
}
/**
 * Position of the animation entry containing `value` within the element's
 * comma-separated `animation` style, or -1 when not present.
 * @param {?} element
 * @param {?} value
 * @return {?}
 */
function findIndexForAnimation(element, value) {
    /** @type {?} */
    const anim = getAnimationStyle(element, '');
    /** @type {?} */
    const tokens = anim.indexOf(',') > 0 ? anim.split(',') : [anim];
    return findMatchingTokenIndex(tokens, value);
}
/**
 * Index of the first token containing `searchToken` as a substring, or -1.
 * @param {?} tokens
 * @param {?} searchToken
 * @return {?}
 */
function findMatchingTokenIndex(tokens, searchToken) {
    /** @type {?} */
    let index = 0;
    for (const token of tokens) {
        if (token.indexOf(searchToken) >= 0) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Attaches or detaches `fn` as an `animationend` listener on `element`.
 * @param {?} element target DOM element
 * @param {?} fn the exact listener reference used when it was added
 * @param {?} doRemove true to remove the listener, false to add it
 * @return {void}
 */
function addRemoveAnimationEvent(element, fn, doRemove) {
    doRemove ? element.removeEventListener(ANIMATIONEND_EVENT, fn) :
        element.addEventListener(ANIMATIONEND_EVENT, fn);
}
/**
 * Writes an inline `animation<name>` style on the element. When `index`
 * is provided and the property already holds a comma-separated list, only
 * the index-th entry is replaced (one entry per concurrent animation);
 * otherwise the whole property value is overwritten.
 * @param {?} element target DOM element
 * @param {?} name suffix appended to ANIMATION_PROP, e.g. 'PlayState' or ''
 * @param {?} value style value to write
 * @param {?=} index optional position within the comma-separated list
 * @return {void}
 */
function setAnimationStyle(element, name, value, index) {
    /** @type {?} */
    const prop = ANIMATION_PROP + name;
    if (index != null) {
        /** @type {?} */
        const oldValue = element.style[prop];
        if (oldValue.length) {
            /** @type {?} */
            const tokens = oldValue.split(',');
            tokens[index] = value;
            value = tokens.join(',');
        }
    }
    element.style[prop] = value;
}
/**
 * Reads the inline `animation<name>` style value from an element; an
 * empty `name` reads the `animation` shorthand itself.
 * @param {?} element source DOM element
 * @param {?} name suffix appended to ANIMATION_PROP, e.g. 'PlayState' or ''
 * @return {?} the current inline style string
 */
function getAnimationStyle(element, name) {
    return element.style[ANIMATION_PROP + name];
}
/**
 * Counts how many times the single character `char` occurs in `value`.
 * @param {?} value string to scan
 * @param {?} char character to count
 * @return {?} number of occurrences
 */
function countChars(value, char) {
    let total = 0;
    for (let pos = 0; pos < value.length; pos++) {
        if (value.charAt(pos) === char) {
            total++;
        }
    }
    return total;
}
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
/** @type {?} Fill mode applied to every generated CSS animation. */
const DEFAULT_FILL_MODE = 'forwards';
/** @type {?} Easing used when the caller provides none. */
const DEFAULT_EASING = 'linear';
/**
 * Animation player backed by a generated CSS `@keyframes` animation,
 * driven through an `ElementAnimationStyleHandler` styler.
 *
 * Lifecycle is tracked in `_state`: 0 = pending, 1 = INITIALIZED,
 * 2 = STARTED, 3 = FINISHED, 4 = DESTROYED (see the inline enum comments).
 */
class CssKeyframesPlayer {
    /**
     * @param {?} element host DOM element the animation runs on
     * @param {?} keyframes keyframe style objects
     * @param {?} animationName name of the generated @keyframes rule
     * @param {?} _duration animation duration in milliseconds
     * @param {?} _delay animation delay in milliseconds
     * @param {?} easing timing function; falls back to DEFAULT_EASING
     * @param {?} _finalStyles styles expected once the animation completes
     * @param {?=} _specialStyles optional handler for non-animatable styles
     */
    constructor(element, keyframes, animationName, _duration, _delay, easing, _finalStyles, _specialStyles) {
        this.element = element;
        this.keyframes = keyframes;
        this.animationName = animationName;
        this._duration = _duration;
        this._delay = _delay;
        this._finalStyles = _finalStyles;
        this._specialStyles = _specialStyles;
        this._onDoneFns = [];
        this._onStartFns = [];
        this._onDestroyFns = [];
        this._started = false;
        this.currentSnapshot = {};
        this._state = 0;
        this.easing = easing || DEFAULT_EASING;
        this.totalTime = _duration + _delay;
        this._buildStyler();
    }
    /**
     * Registers a callback invoked when the animation starts.
     * @param {?} fn
     * @return {?}
     */
    onStart(fn) { this._onStartFns.push(fn); }
    /**
     * Registers a callback invoked when the animation finishes.
     * @param {?} fn
     * @return {?}
     */
    onDone(fn) { this._onDoneFns.push(fn); }
    /**
     * Registers a callback invoked when the player is destroyed.
     * @param {?} fn
     * @return {?}
     */
    onDestroy(fn) { this._onDestroyFns.push(fn); }
    /**
     * Destroys the player (idempotent): flushes start/done callbacks, tears
     * down the styler and any special-styles handler, then runs destroy
     * callbacks.
     * @return {?}
     */
    destroy() {
        this.init();
        if (this._state >= 4 /* DESTROYED */)
            return;
        this._state = 4 /* DESTROYED */;
        this._styler.destroy();
        this._flushStartFns();
        this._flushDoneFns();
        if (this._specialStyles) {
            this._specialStyles.destroy();
        }
        this._onDestroyFns.forEach(fn => fn());
        this._onDestroyFns = [];
    }
    /**
     * Runs and clears all queued done callbacks.
     * @private
     * @return {?}
     */
    _flushDoneFns() {
        this._onDoneFns.forEach(fn => fn());
        this._onDoneFns = [];
    }
    /**
     * Runs and clears all queued start callbacks.
     * @private
     * @return {?}
     */
    _flushStartFns() {
        this._onStartFns.forEach(fn => fn());
        this._onStartFns = [];
    }
    /**
     * Jumps the animation to its finished state (idempotent); start
     * callbacks are flushed first in case the player never played.
     * @return {?}
     */
    finish() {
        this.init();
        if (this._state >= 3 /* FINISHED */)
            return;
        this._state = 3 /* FINISHED */;
        this._styler.finish();
        this._flushStartFns();
        if (this._specialStyles) {
            this._specialStyles.finish();
        }
        this._flushDoneFns();
    }
    /**
     * Scrubs the underlying animation to the given position.
     * @param {?} value
     * @return {?}
     */
    setPosition(value) { this._styler.setPosition(value); }
    /**
     * @return {?} the styler's current position
     */
    getPosition() { return this._styler.getPosition(); }
    /**
     * @return {?} whether play() has been called at least once
     */
    hasStarted() { return this._state >= 2 /* STARTED */; }
    /**
     * Applies the animation styles (idempotent); a delayed animation is
     * immediately paused so it does not run before play() is called.
     * @return {?}
     */
    init() {
        if (this._state >= 1 /* INITIALIZED */)
            return;
        this._state = 1 /* INITIALIZED */;
        /** @type {?} */
        const elm = this.element; // NOTE(review): unused local, kept as-is
        this._styler.apply();
        if (this._delay) {
            this._styler.pause();
        }
    }
    /**
     * Starts (or resumes) the animation, flushing start callbacks on the
     * first call.
     * @return {?}
     */
    play() {
        this.init();
        if (!this.hasStarted()) {
            this._flushStartFns();
            this._state = 2 /* STARTED */;
            if (this._specialStyles) {
                this._specialStyles.start();
            }
        }
        this._styler.resume();
    }
    /**
     * Pauses the animation.
     * @return {?}
     */
    pause() {
        this.init();
        this._styler.pause();
    }
    /**
     * Resets and replays the animation from the beginning.
     * @return {?}
     */
    restart() {
        this.reset();
        this.play();
    }
    /**
     * Rebuilds the styler so the animation can run again from scratch.
     * @return {?}
     */
    reset() {
        this._styler.destroy();
        this._buildStyler();
        this._styler.apply();
    }
    /**
     * Creates the per-element styler; its completion callback drives
     * finish().
     * @private
     * @return {?}
     */
    _buildStyler() {
        this._styler = new ElementAnimationStyleHandler(this.element, this.animationName, this._duration, this._delay, this.easing, DEFAULT_FILL_MODE, () => this.finish());
    }
    /**
     * \@internal Flushes either the start or the done callback queue.
     * @param {?} phaseName 'start' or any other value for 'done'
     * @return {?}
     */
    triggerCallback(phaseName) {
        /** @type {?} */
        const methods = phaseName == 'start' ? this._onStartFns : this._onDoneFns;
        methods.forEach(fn => fn());
        methods.length = 0;
    }
    /**
     * Snapshots the element's style values (final styles when finished,
     * otherwise the computed styles) so a follow-up animation can continue
     * from them.
     * @return {?}
     */
    beforeDestroy() {
        this.init();
        /** @type {?} */
        const styles = {};
        if (this.hasStarted()) {
            /** @type {?} */
            const finished = this._state >= 3 /* FINISHED */;
            Object.keys(this._finalStyles).forEach(prop => {
                if (prop != 'offset') {
                    styles[prop] = finished ? this._finalStyles[prop] : computeStyle(this.element, prop);
                }
            });
        }
        this.currentSnapshot = styles;
    }
}
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
/**
 * Player used for zero-duration "animations": it simply writes the final
 * styles inline on the element, remembering each property's previous
 * inline value so destroy() can restore it.
 */
class DirectStylePlayer extends NoopAnimationPlayer {
    /**
     * @param {?} element target DOM element
     * @param {?} styles style map to apply (keys are hyphenated on store)
     */
    constructor(element, styles) {
        super();
        this.element = element;
        this._startingStyles = {};
        this.__initialized = false;
        this._styles = hypenatePropsObject(styles);
    }
    /**
     * Captures the element's current inline values for every property this
     * player will touch (runs once; no-op after destroy()).
     * @return {?}
     */
    init() {
        if (this.__initialized || !this._startingStyles)
            return;
        this.__initialized = true;
        Object.keys(this._styles).forEach(prop => {
            (/** @type {?} */ (this._startingStyles))[prop] = this.element.style[prop];
        });
        super.init();
    }
    /**
     * Applies the target styles inline (no-op after destroy()).
     * @return {?}
     */
    play() {
        if (!this._startingStyles)
            return;
        this.init();
        Object.keys(this._styles)
            .forEach(prop => this.element.style.setProperty(prop, this._styles[prop]));
        super.play();
    }
    /**
     * Restores every touched property to its captured starting value
     * (removing properties that had none), then marks the player dead by
     * nulling `_startingStyles`.
     * @return {?}
     */
    destroy() {
        if (!this._startingStyles)
            return;
        Object.keys(this._startingStyles).forEach(prop => {
            /** @type {?} */
            const value = (/** @type {?} */ (this._startingStyles))[prop];
            if (value) {
                this.element.style.setProperty(prop, value);
            }
            else {
                this.element.style.removeProperty(prop);
            }
        });
        this._startingStyles = null;
        super.destroy();
    }
}
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
/** @type {?} Prefix for generated @keyframes rule names. */
const KEYFRAMES_NAME_PREFIX = 'gen_css_kf_';
/** @type {?} Indentation unit for generated CSS text. */
const TAB_SPACE = ' ';
/**
 * Animation driver that emulates animations with generated CSS
 * `@keyframes` rules (inserted as <style> elements in <head>); used when
 * the native Web Animations implementation is unavailable.
 */
class CssKeyframesDriver {
    constructor() {
        // Counter that keeps every generated @keyframes name unique.
        this._count = 0;
        // Cached <head> element that generated <style> tags are appended to.
        this._head = document.querySelector('head');
        // Ensures the scrubber warning is logged at most once.
        this._warningIssued = false;
    }
    /**
     * @param {?} prop CSS property name
     * @return {?} whether the property may be animated
     */
    validateStyleProperty(prop) { return validateStyleProperty(prop); }
    /**
     * @param {?} element
     * @param {?} selector
     * @return {?} whether the element matches the selector
     */
    matchesElement(element, selector) {
        return matchesElement(element, selector);
    }
    /**
     * @param {?} elm1
     * @param {?} elm2
     * @return {?} whether elm1 contains elm2
     */
    containsElement(elm1, elm2) { return containsElement(elm1, elm2); }
    /**
     * @param {?} element root of the query
     * @param {?} selector CSS selector
     * @param {?} multi true to return all matches
     * @return {?} matched element(s)
     */
    query(element, selector, multi) {
        return invokeQuery(element, selector, multi);
    }
    /**
     * @param {?} element
     * @param {?} prop CSS property name
     * @param {?=} defaultValue unused; present for driver interface parity
     * @return {?} the computed style value
     */
    computeStyle(element, prop, defaultValue) {
        return (/** @type {?} */ (((/** @type {?} */ (window.getComputedStyle(element))))[prop]));
    }
    /**
     * Builds a <style> element containing an `@keyframes` rule generated
     * from the given keyframe objects. `offset` becomes the percentage
     * selector and `easing` becomes `animation-timing-function`.
     * @param {?} element unused; present for interface parity
     * @param {?} name name of the generated @keyframes rule
     * @param {?} keyframes keyframe style objects (camelCased props)
     * @return {?} a detached <style> element ready to append
     */
    buildKeyframeElement(element, name, keyframes) {
        keyframes = keyframes.map(kf => hypenatePropsObject(kf));
        /** @type {?} */
        let keyframeStr = `@keyframes ${name} {\n`;
        /** @type {?} */
        let tab = '';
        keyframes.forEach(kf => {
            tab = TAB_SPACE;
            /** @type {?} */
            const offset = parseFloat(kf['offset']);
            keyframeStr += `${tab}${offset * 100}% {\n`;
            tab += TAB_SPACE;
            Object.keys(kf).forEach(prop => {
                /** @type {?} */
                const value = kf[prop];
                switch (prop) {
                    case 'offset':
                        return;
                    case 'easing':
                        if (value) {
                            keyframeStr += `${tab}animation-timing-function: ${value};\n`;
                        }
                        return;
                    default:
                        keyframeStr += `${tab}${prop}: ${value};\n`;
                        return;
                }
            });
            keyframeStr += `${tab}}\n`;
        });
        keyframeStr += `}\n`;
        /** @type {?} */
        const kfElm = document.createElement('style');
        // Use textContent rather than innerHTML: the generated CSS is plain
        // text, must not be parsed as HTML, and this avoids any markup
        // injection hazard from style values.
        kfElm.textContent = keyframeStr;
        return kfElm;
    }
    /**
     * Creates a player for the given keyframes. Zero-duration animations
     * get a DirectStylePlayer (styles applied immediately, no events);
     * otherwise a @keyframes rule is generated, appended to <head> and
     * driven by a CssKeyframesPlayer. Snapshots of previous CSS players
     * are merged in so interrupted animations continue smoothly.
     * @param {?} element target DOM element
     * @param {?} keyframes keyframe style objects
     * @param {?} duration in milliseconds
     * @param {?} delay in milliseconds
     * @param {?} easing timing function
     * @param {?=} previousPlayers players being replaced by this animation
     * @param {?=} scrubberAccessRequested true when programmatic scrubbing
     *     was requested (unsupported here; a warning is logged once)
     * @return {?} the new animation player
     */
    animate(element, keyframes, duration, delay, easing, previousPlayers = [], scrubberAccessRequested) {
        if (scrubberAccessRequested) {
            this._notifyFaultyScrubber();
        }
        /** @type {?} */
        const previousCssKeyframePlayers = (/** @type {?} */ (previousPlayers.filter(player => player instanceof CssKeyframesPlayer)));
        /** @type {?} */
        const previousStyles = {};
        if (allowPreviousPlayerStylesMerge(duration, delay)) {
            previousCssKeyframePlayers.forEach(player => {
                /** @type {?} */
                let styles = player.currentSnapshot;
                Object.keys(styles).forEach(prop => previousStyles[prop] = styles[prop]);
            });
        }
        keyframes = balancePreviousStylesIntoKeyframes(element, keyframes, previousStyles);
        /** @type {?} */
        const finalStyles = flattenKeyframesIntoStyles(keyframes);
        // if there is no animation then there is no point in applying
        // styles and waiting for an event to get fired. This causes lag.
        // It's better to just directly apply the styles to the element
        // via the direct styling animation player.
        if (duration == 0) {
            return new DirectStylePlayer(element, finalStyles);
        }
        /** @type {?} */
        const animationName = `${KEYFRAMES_NAME_PREFIX}${this._count++}`;
        /** @type {?} */
        const kfElm = this.buildKeyframeElement(element, animationName, keyframes);
        // Reuse the <head> reference cached in the constructor instead of
        // re-querying the document on every animation.
        this._head.appendChild(kfElm);
        /** @type {?} */
        const specialStyles = packageNonAnimatableStyles(element, keyframes);
        /** @type {?} */
        const player = new CssKeyframesPlayer(element, keyframes, animationName, duration, delay, easing, finalStyles, specialStyles);
        // Remove the generated <style> once the player is torn down.
        player.onDestroy(() => removeElement(kfElm));
        return player;
    }
    /**
     * Logs (once) that programmatic scrubbing is unsupported without the
     * web-animations polyfill.
     * @private
     * @return {?}
     */
    _notifyFaultyScrubber() {
        if (!this._warningIssued) {
            console.warn('@angular/animations: please load the web-animations.js polyfill to allow programmatic access...\n', ' visit http://bit.ly/IWukam to learn more about using the web-animation-js polyfill.');
            this._warningIssued = true;
        }
    }
}
/**
 * Collapses a keyframe object (or array of keyframe objects) into a single
 * style map; later keyframes overwrite earlier ones, and the bookkeeping
 * keys `offset` and `easing` are dropped.
 * @param {?} keyframes keyframe object, array of keyframe objects, or falsy
 * @return {?} flattened style map
 */
function flattenKeyframesIntoStyles(keyframes) {
    const flattened = {};
    if (!keyframes) {
        return flattened;
    }
    const list = Array.isArray(keyframes) ? keyframes : [keyframes];
    for (const kf of list) {
        for (const prop of Object.keys(kf)) {
            if (prop !== 'offset' && prop !== 'easing') {
                flattened[prop] = kf[prop];
            }
        }
    }
    return flattened;
}
/**
 * Detaches `node` from the DOM.
 * NOTE(review): assumes `node.parentNode` is non-null — in this file it is
 * only called for <style> elements previously appended to <head>.
 * @param {?} node element to remove
 * @return {?}
 */
function removeElement(node) {
    node.parentNode.removeChild(node);
}
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
/**
 * Animation player backed by the native Web Animations API
 * (`element.animate(...)`), wrapping the returned DOM player.
 */
class WebAnimationsPlayer {
    /**
     * @param {?} element host DOM element
     * @param {?} keyframes keyframe style objects passed to element.animate
     * @param {?} options animate() options; 'duration' and 'delay' are read
     * @param {?=} _specialStyles optional handler for non-animatable styles
     */
    constructor(element, keyframes, options, _specialStyles) {
        this.element = element;
        this.keyframes = keyframes;
        this.options = options;
        this._specialStyles = _specialStyles;
        this._onDoneFns = [];
        this._onStartFns = [];
        this._onDestroyFns = [];
        this._initialized = false;
        this._finished = false;
        this._started = false;
        this._destroyed = false;
        this.time = 0;
        this.parentPlayer = null;
        this.currentSnapshot = {};
        this._duration = (/** @type {?} */ (options['duration']));
        this._delay = (/** @type {?} */ (options['delay'])) || 0;
        // `time` mirrors the totalTime getter (duration + delay).
        this.time = this._duration + this._delay;
    }
    /**
     * Runs done callbacks exactly once; triggered by the DOM player's
     * 'finish' event or by finish()/destroy().
     * @private
     * @return {?}
     */
    _onFinish() {
        if (!this._finished) {
            this._finished = true;
            this._onDoneFns.forEach(fn => fn());
            this._onDoneFns = [];
        }
    }
    /**
     * Builds the underlying DOM player and leaves it in a not-yet-running
     * state.
     * @return {?}
     */
    init() {
        this._buildPlayer();
        this._preparePlayerBeforeStart();
    }
    /**
     * Lazily invokes element.animate() (once), records the final keyframe
     * for snapshotting, and hooks the 'finish' event.
     * @private
     * @return {?}
     */
    _buildPlayer() {
        if (this._initialized)
            return;
        this._initialized = true;
        /** @type {?} */
        const keyframes = this.keyframes;
        ((/** @type {?} */ (this))).domPlayer =
            this._triggerWebAnimation(this.element, keyframes, this.options);
        this._finalKeyframe = keyframes.length ? keyframes[keyframes.length - 1] : {};
        this.domPlayer.addEventListener('finish', () => this._onFinish());
    }
    /**
     * @private
     * @return {?}
     */
    _preparePlayerBeforeStart() {
        // this is required so that the player doesn't start to animate right away
        if (this._delay) {
            this._resetDomPlayerState();
        }
        else {
            this.domPlayer.pause();
        }
    }
    /**
     * \@internal Invokes the native animate() call.
     * @param {?} element
     * @param {?} keyframes
     * @param {?} options
     * @return {?} the native DOM animation player
     */
    _triggerWebAnimation(element, keyframes, options) {
        // jscompiler doesn't seem to know animate is a native property because it's not fully
        // supported yet across common browsers (we polyfill it for Edge/Safari) [CL #143630929]
        return (/** @type {?} */ (element['animate'](keyframes, options)));
    }
    /**
     * Registers a callback invoked when the animation starts.
     * @param {?} fn
     * @return {?}
     */
    onStart(fn) { this._onStartFns.push(fn); }
    /**
     * Registers a callback invoked when the animation finishes.
     * @param {?} fn
     * @return {?}
     */
    onDone(fn) { this._onDoneFns.push(fn); }
    /**
     * Registers a callback invoked when the player is destroyed.
     * @param {?} fn
     * @return {?}
     */
    onDestroy(fn) { this._onDestroyFns.push(fn); }
    /**
     * Starts (or resumes) playback, flushing start callbacks on the first
     * call.
     * @return {?}
     */
    play() {
        this._buildPlayer();
        if (!this.hasStarted()) {
            this._onStartFns.forEach(fn => fn());
            this._onStartFns = [];
            this._started = true;
            if (this._specialStyles) {
                this._specialStyles.start();
            }
        }
        this.domPlayer.play();
    }
    /**
     * Pauses playback.
     * @return {?}
     */
    pause() {
        this.init();
        this.domPlayer.pause();
    }
    /**
     * Jumps to the finished state, firing done callbacks.
     * @return {?}
     */
    finish() {
        this.init();
        if (this._specialStyles) {
            this._specialStyles.finish();
        }
        this._onFinish();
        this.domPlayer.finish();
    }
    /**
     * Cancels the DOM player and clears the finished/started/destroyed
     * flags so the animation can be replayed.
     * @return {?}
     */
    reset() {
        this._resetDomPlayerState();
        this._destroyed = false;
        this._finished = false;
        this._started = false;
    }
    /**
     * Cancels the underlying DOM player, if one was ever built.
     * @private
     * @return {?}
     */
    _resetDomPlayerState() {
        if (this.domPlayer) {
            this.domPlayer.cancel();
        }
    }
    /**
     * Restarts playback from the beginning.
     * @return {?}
     */
    restart() {
        this.reset();
        this.play();
    }
    /**
     * @return {?} whether play() has been called at least once
     */
    hasStarted() { return this._started; }
    /**
     * Destroys the player (idempotent): cancels the DOM player, fires done
     * callbacks, tears down special styles, then runs destroy callbacks.
     * @return {?}
     */
    destroy() {
        if (!this._destroyed) {
            this._destroyed = true;
            this._resetDomPlayerState();
            this._onFinish();
            if (this._specialStyles) {
                this._specialStyles.destroy();
            }
            this._onDestroyFns.forEach(fn => fn());
            this._onDestroyFns = [];
        }
    }
    /**
     * Scrubs to a fraction of the total time (0..1).
     * @param {?} p
     * @return {?}
     */
    setPosition(p) { this.domPlayer.currentTime = p * this.time; }
    /**
     * @return {?} current position as a fraction of the total time
     */
    getPosition() { return this.domPlayer.currentTime / this.time; }
    /**
     * @return {?} delay + duration in milliseconds (same value as `time`)
     */
    get totalTime() { return this._delay + this._duration; }
    /**
     * Snapshots the element's style values (final keyframe when finished,
     * otherwise computed styles) so a follow-up animation can continue
     * from them.
     * @return {?}
     */
    beforeDestroy() {
        /** @type {?} */
        const styles = {};
        if (this.hasStarted()) {
            Object.keys(this._finalKeyframe).forEach(prop => {
                if (prop != 'offset') {
                    styles[prop] =
                        this._finished ? this._finalKeyframe[prop] : computeStyle(this.element, prop);
                }
            });
        }
        this.currentSnapshot = styles;
    }
    /**
     * \@internal Flushes either the start or the done callback queue.
     * @param {?} phaseName 'start' or any other value for 'done'
     * @return {?}
     */
    triggerCallback(phaseName) {
        /** @type {?} */
        const methods = phaseName == 'start' ? this._onStartFns : this._onDoneFns;
        methods.forEach(fn => fn());
        methods.length = 0;
    }
}
/**
 * @fileoverview added by tsickle
 * @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
/**
 * Animation driver that prefers the native Web Animations API and falls
 * back to the CSS-keyframes driver when `Element.animate` is not a native
 * implementation (and no scrubber access was requested).
 */
class WebAnimationsDriver {
    constructor() {
        // Heuristic: a native animate() stringifies to "[native code]".
        this._isNativeImpl = /\{\s*\[native\s+code\]\s*\}/.test(getElementAnimateFn().toString());
        this._cssKeyframesDriver = new CssKeyframesDriver();
    }
    /**
     * @param {?} prop CSS property name
     * @return {?} whether the property may be animated
     */
    validateStyleProperty(prop) { return validateStyleProperty(prop); }
    /**
     * @param {?} element
     * @param {?} selector
     * @return {?} whether the element matches the selector
     */
    matchesElement(element, selector) {
        return matchesElement(element, selector);
    }
    /**
     * @param {?} elm1
     * @param {?} elm2
     * @return {?} whether elm1 contains elm2
     */
    containsElement(elm1, elm2) { return containsElement(elm1, elm2); }
    /**
     * @param {?} element root of the query
     * @param {?} selector CSS selector
     * @param {?} multi true to return all matches
     * @return {?} matched element(s)
     */
    query(element, selector, multi) {
        return invokeQuery(element, selector, multi);
    }
    /**
     * @param {?} element
     * @param {?} prop CSS property name
     * @param {?=} defaultValue unused; present for driver interface parity
     * @return {?} the computed style value
     */
    computeStyle(element, prop, defaultValue) {
        return (/** @type {?} */ (((/** @type {?} */ (window.getComputedStyle(element))))[prop]));
    }
    /**
     * Test hook: force the driver to treat Web Animations as (un)supported.
     * @param {?} supported
     * @return {?}
     */
    overrideWebAnimationsSupport(supported) { this._isNativeImpl = supported; }
    /**
     * Creates a player. Falls back to the CSS keyframes driver when the
     * native implementation is absent and no scrubbing was requested;
     * otherwise builds a WebAnimationsPlayer, merging snapshots of any
     * previous web-animation players so interrupted animations continue
     * smoothly.
     * @param {?} element target DOM element
     * @param {?} keyframes keyframe style objects
     * @param {?} duration in milliseconds
     * @param {?} delay in milliseconds
     * @param {?} easing timing function (omitted from options when falsy)
     * @param {?=} previousPlayers players being replaced by this animation
     * @param {?=} scrubberAccessRequested true when programmatic scrubbing
     *     is needed (forces the native path)
     * @return {?} the new animation player
     */
    animate(element, keyframes, duration, delay, easing, previousPlayers = [], scrubberAccessRequested) {
        /** @type {?} */
        const useKeyframes = !scrubberAccessRequested && !this._isNativeImpl;
        if (useKeyframes) {
            return this._cssKeyframesDriver.animate(element, keyframes, duration, delay, easing, previousPlayers);
        }
        /** @type {?} */
        const fill = delay == 0 ? 'both' : 'forwards';
        /** @type {?} */
        const playerOptions = { duration, delay, fill };
        // we check for this to avoid having a null|undefined value be present
        // for the easing (which results in an error for certain browsers #9752)
        if (easing) {
            playerOptions['easing'] = easing;
        }
        /** @type {?} */
        const previousStyles = {};
        /** @type {?} */
        const previousWebAnimationPlayers = (/** @type {?} */ (previousPlayers.filter(player => player instanceof WebAnimationsPlayer)));
        if (allowPreviousPlayerStylesMerge(duration, delay)) {
            previousWebAnimationPlayers.forEach(player => {
                /** @type {?} */
                let styles = player.currentSnapshot;
                Object.keys(styles).forEach(prop => previousStyles[prop] = styles[prop]);
            });
        }
        keyframes = keyframes.map(styles => copyStyles(styles, false));
        keyframes = balancePreviousStylesIntoKeyframes(element, keyframes, previousStyles);
        /** @type {?} */
        const specialStyles = packageNonAnimatableStyles(element, keyframes);
        return new WebAnimationsPlayer(element, keyframes, playerOptions, specialStyles);
    }
}
/**
 * Whether the environment exposes a usable `Element.prototype.animate`
 * (the Web Animations API).
 * @return {?} true when `animate` is a function
 */
function supportsWebAnimations() {
    return typeof getElementAnimateFn() === 'function';
}
/**
 * Returns `Element.prototype.animate` when running in a browser, or an
 * empty object so downstream `typeof` checks safely report non-support.
 * @return {?}
 */
function getElementAnimateFn() {
    return (isBrowser() && ((/** @type {?} */ (Element))).prototype['animate']) || {};
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,extraRequire,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
/**
* Generated bundle index. Do not edit.
*/
export { SpecialCasedStyles as ɵangular_packages_animations_browser_browser_a, AnimationDriver, AnimationDriver as ɵAnimationDriver, Animation as ɵAnimation, AnimationStyleNormalizer as ɵAnimationStyleNormalizer, NoopAnimationStyleNormalizer as ɵNoopAnimationStyleNormalizer, WebAnimationsStyleNormalizer as ɵWebAnimationsStyleNormalizer, NoopAnimationDriver as ɵNoopAnimationDriver, AnimationEngine as ɵAnimationEngine, CssKeyframesDriver as ɵCssKeyframesDriver, CssKeyframesPlayer as ɵCssKeyframesPlayer, containsElement as ɵcontainsElement, invokeQuery as ɵinvokeQuery, matchesElement as ɵmatchesElement, validateStyleProperty as ɵvalidateStyleProperty, WebAnimationsDriver as ɵWebAnimationsDriver, supportsWebAnimations as ɵsupportsWebAnimations, WebAnimationsPlayer as ɵWebAnimationsPlayer, allowPreviousPlayerStylesMerge as ɵallowPreviousPlayerStylesMerge };
//# sourceMappingURL=browser.js.map
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from shop.models import order
class OrderShipping(order.BaseOrderShipping):
    """Default materialized model for OrderShipping.

    Concrete subclass of ``order.BaseOrderShipping`` adding no fields of
    its own; presumably exists so the abstract base is backed by a real
    database table in the default shop configuration — confirm against
    the ``shop.models.order`` base class.
    """
|
const mongoose = require('mongoose');
const Phase = require('../models/Phase');
/**
 * Looks up the tournament phase containing the given match and returns
 * its `phaseType` string.
 *
 * NOTE(review): when no phase matches, `findOne` resolves to null and the
 * `identifier._doc` access below throws a TypeError — confirm callers
 * always pass a matchId that exists within the tournament.
 * NOTE(review): reaching into `_doc` bypasses mongoose getters/virtuals;
 * presumably `identifier.phaseType` would behave the same — verify before
 * changing.
 *
 * @param {string|ObjectId} tournamentId tournament the match belongs to
 * @param {string|ObjectId} matchId id of a match embedded in a phase
 * @returns {Promise<string>} the phase's `phaseType`
 */
exports.getPhaseTypeNameForMatchId = async (tournamentId, matchId) => {
    const identifier = await Phase.findOne({
        tournamentId: mongoose.Types.ObjectId(tournamentId),
        'matches._id': matchId
    }
    );
    return identifier._doc.phaseType;
}
exports.getSettersPhase = (phases) => {
const octavos = phases.find(phase => phase.phaseType === 'Octavos de final');
const setCuartos = {};
const setOctavos = {};
octavos.matches.forEach((match, index) => {
const esPar = index % 2 === 0;
const localName = match.localTeam.teamName;
const visitorName = match.visitorTeam.teamName;
const nextIndex = esPar ? index / 2 : Math.floor((index / 2));
console.log(localName);
if(localName && visitorName && localName !== '-' && visitorName !== '-') {
return;
}
if(esPar) {
const matchString = `matches.${nextIndex}.localTeam.teamName`;
setCuartos[matchString] = localName;
} else {
const matchString = `matches.${nextIndex}.visitorTeam.teamName`;
setCuartos[matchString] = localName;
}
const cuartosState = `matches.${nextIndex}.state`
setCuartos[cuartosState] = 'Pendiente de Juego';
const octavosState = `matches.${index}.state`;
setOctavos[octavosState] = 'Finalizado';
});
return { setCuartos, setOctavos };
};
|
"""Tests for the PiCN Interfaces"""
|
# units.py
# Jacob Hummel
"""
Physical cgs unit conversions for analyzing my Gadget2 HDF5 snapshot data.
"""
### Unit Selection Dictionaries
class Units(object):
    """Unit-system bookkeeping for Gadget2 snapshot data.

    Builds conversion factors from the simulation's internal code units
    (mass, length, velocity) to cgs and to convenient astronomical units,
    then records the currently selected unit for each physical quantity.

    Keyword arguments (all optional):
        UnitMass_in_g, UnitLength_in_cm, UnitVelocity_in_cm_per_s:
            code-unit definitions; defaults are the usual Gadget choices
            (1e10 Msun in g, 1 kpc in cm, 1 km/s in cm/s).
        units_over_h: if True, values keep their little-h factor
            (sets ``remove_h`` False).
        coordinates: 'physical' or 'comoving'.
        length, mass, time, velocity, density, pressure, energy:
            initially selected unit names (keys of the dicts below).
    """
    def __init__(self, **unitargs):
        super(Units, self).__init__()
        ### Code units:
        UnitMass_in_g = unitargs.pop('UnitMass_in_g', 1.989e43)
        UnitLength_in_cm = unitargs.pop('UnitLength_in_cm', 3.085678e21)
        UnitVelocity_in_cm_per_s = unitargs.pop('UnitVelocity_in_cm_per_s', 1e5)
        ### cgs Conversions (derived from the code units)
        Time_s = UnitLength_in_cm / UnitVelocity_in_cm_per_s
        Density_cgs = UnitMass_in_g / UnitLength_in_cm**3
        Pressure_cgs = UnitMass_in_g / UnitLength_in_cm / Time_s**2
        Energy_cgs = UnitMass_in_g * UnitLength_in_cm**2 / Time_s**2
        ### Additional Mass Conversions
        Mass_sun = UnitMass_in_g / 1.989e33
        ### Additional Distance Conversions
        Length_AU = UnitLength_in_cm / 1.49598e13
        Length_pc = UnitLength_in_cm / 3.085678e18
        Length_kpc = Length_pc / 1e3
        ### Additional Velocity Conversions
        Velocity_kms = UnitVelocity_in_cm_per_s / 1e5
        ### Additional Time Conversions
        Time_yr = Time_s / 3.15569e7
        Time_myr = Time_yr / 1e6
        Time_gyr = Time_yr / 1e9
        # Conversion-factor lookup tables, keyed by unit name.
        self.lengths = {'cm': UnitLength_in_cm, 'AU': Length_AU,
                        'pc': Length_pc, 'kpc': Length_kpc}
        self.masses = {'g': UnitMass_in_g, 'solar': Mass_sun}
        self.times = {'s': Time_s, 'yr': Time_yr, 'myr': Time_myr, 'gyr': Time_gyr}
        self.velocities = {'cgs': UnitVelocity_in_cm_per_s, 'kms': Velocity_kms}
        self.densities = {'cgs': Density_cgs}
        self.pressures = {'cgs': Pressure_cgs}
        self.energies = {'cgs': Energy_cgs, 'specific cgs': Energy_cgs / UnitMass_in_g}
        self.remove_h = not unitargs.pop('units_over_h', False)
        self.set_coordinate_system(unitargs.pop('coordinates', 'physical'))
        self.set_length(unitargs.pop('length', 'kpc'))
        self.set_mass(unitargs.pop('mass', 'solar'))
        self.set_time(unitargs.pop('time', 'yr'))
        self.set_velocity(unitargs.pop('velocity', 'kms'))
        self.set_density(unitargs.pop('density', 'cgs'))
        self.set_pressure(unitargs.pop('pressure', 'cgs'))
        self.set_energy(unitargs.pop('energy', 'cgs'))
        # Coordinate positions and smoothing lengths track the length unit
        # independently (see the _set_* helpers below).
        self._coord_unit = self.length_unit
        self._smoothing_unit = self.length_unit
    def set_coordinate_system(self, coordinates):
        """Select 'physical' or 'comoving' coordinates.

        Raises:
            KeyError: if ``coordinates`` is not a recognized system.
        """
        if coordinates not in ['physical', 'comoving']:
            raise KeyError("Unknown coordinate system: %r "
                           "(expected 'physical' or 'comoving')" % (coordinates,))
        self.coordinate_system = coordinates
    def set_length(self, unit):
        """Select the active length unit (a key of ``self.lengths``)."""
        self.length_unit = unit
        self.length_conv = self.lengths[self.length_unit]
    def _set_coord_length(self, unit):
        # Internal: also remember the unit used for particle coordinates.
        self.set_length(unit)
        self._coord_unit = unit
    def _set_smoothing_length(self, unit):
        # Internal: also remember the unit used for smoothing lengths.
        self.set_length(unit)
        self._smoothing_unit = unit
    def set_mass(self, unit):
        """Select the active mass unit (a key of ``self.masses``)."""
        self.mass_unit = unit
        self.mass_conv = self.masses[self.mass_unit]
    def set_time(self, unit):
        """Select the active time unit (a key of ``self.times``)."""
        self.time_unit = unit
        self.time_conv = self.times[self.time_unit]
    def set_velocity(self, unit):
        """Select the active velocity unit (a key of ``self.velocities``)."""
        self.velocity_unit = unit
        self.velocity_conv = self.velocities[self.velocity_unit]
    def set_density(self, unit):
        """Select the active density unit (a key of ``self.densities``)."""
        self.density_unit = unit
        self.density_conv = self.densities[self.density_unit]
    def set_pressure(self, unit):
        """Select the active pressure unit (a key of ``self.pressures``)."""
        self.pressure_unit = unit
        self.pressure_conv = self.pressures[self.pressure_unit]
    def set_energy(self, unit):
        """Select the active energy unit (a key of ``self.energies``)."""
        self.energy_unit = unit
        self.energy_conv = self.energies[self.energy_unit]
    def convert_units(self, val, u1, u2):
        """Convert a *length* value from unit ``u1`` to unit ``u2``.

        Note: despite the generic name, only length units (keys of
        ``self.lengths``) are supported.
        """
        val /= self.lengths[u1]
        val *= self.lengths[u2]
        return val
|
"""Custom permission classes built on the DRF ``BasePermission`` base."""
from rest_framework import permissions
class UpdateOwnProfile(permissions.BasePermission):
    """Allow users to edit only their own profile."""
    # DRF calls has_object_permission once per request that targets a
    # specific object, after the view-level checks have passed.
    def has_object_permission(self,request,view,obj):
        """Return True when the request may act on ``obj``.

        Read-only (SAFE_METHODS) requests are always allowed; write
        requests are allowed only when the target object belongs to the
        requesting user (matching ids).
        """
        if request.method in permissions.SAFE_METHODS:
            return True
        return obj.id == request.user.id
|
import { combineReducers } from 'redux';
// needs to be named `form` or be ready for errors down the road
import { reducer as form } from 'redux-form';
import client from './components/Client/reducer';
import signup from './components/Signup/reducer';
import login from './components/Login/reducer';
import chatMessages from './components/Messages/reducer';
// Root reducer: each key below becomes a top-level slice of the store state.
const IndexReducer = combineReducers({
    client,
    signup,
    login,
    chatMessages,
    form, // redux-form state; the key name is mandated by the library
});
export default IndexReducer;
|
'use strict';
// Transform-function names recognised by this module.
var regTransformTypes = /matrix|translate|scale|rotate|skewX|skewY/,
    // Splits e.g. "translate(10 50) scale(2)" into alternating
    // name/argument chunks (see the example in transform2js below).
    regTransformSplit = /\s*(matrix|translate|scale|rotate|skewX|skewY)\s*\(\s*(.+?)\s*\)[\s,]*/,
    // Signed decimal numbers with optional exponent. NOTE: the /g flag
    // makes this regex stateful (lastIndex); transform2js drives it to
    // exhaustion with an exec() loop, which resets the state each time.
    regNumericValues = /[-+]?(?:\d*\.\d+|\d+\.?)(?:[eE][-+]?\d+)?/g;
/**
 * Convert transform string to JS representation.
 *
 * @param {String} transformString input, e.g. "translate(10 50) scale(2)"
 * @return {Array} array of {name, data} transform objects; empty array
 *                 when the string is broken (a transform with no data)
 */
exports.transform2js = function(transformString) {
    // JS representation of the transform data
    var transforms = [],
        // current transform context
        current;
    // split value into ['', 'translate', '10 50', '', 'scale', '2', '', 'rotate', '-45', '']
    transformString.split(regTransformSplit).forEach(function(item) {
        var num;
        if (item) {
            // if item is a transform-function name
            if (regTransformTypes.test(item)) {
                // then collect it and change current context
                transforms.push(current = { name: item });
                // else if item is data
            } else {
                // then split it into [10, 50] and collect as context.data
                // eslint-disable-next-line no-cond-assign
                while (num = regNumericValues.exec(item)) {
                    // Number() coerces the one-element match array to its
                    // numeric value.
                    num = Number(num);
                    if (current.data)
                        current.data.push(num);
                    else
                        current.data = [num];
                }
            }
        }
    });
    // return empty array if broken transform (no data)
    return current && current.data ? transforms : [];
};
/**
* Multiply transforms into one.
*
* @param {Array} input transforms array
* @return {Array} output matrix array
*/
exports.transformsMultiply = function(transforms) {
// convert transforms objects to the matrices
transforms = transforms.map(function(transform) {
if (transform.name === 'matrix') {
return transform.data;
}
return transformToMatrix(transform);
});
// multiply all matrices into one
transforms = {
name: 'matrix',
data: transforms.length > 0 ? transforms.reduce(multiplyTransformMatrices) : []
};
return transforms;
};
/**
 * Trigonometry helpers working in degrees rather than radians; the
 * inverse functions round their result to a fixed number of decimals.
 *
 * @type {Object}
 */
var mth = exports.mth = {
    // degrees → radians
    rad: function(deg) {
        return deg * Math.PI / 180;
    },
    // radians → degrees
    deg: function(rad) {
        return rad * 180 / Math.PI;
    },
    cos: function(deg) {
        return Math.cos(this.rad(deg));
    },
    // arc-cosine in degrees, rounded to floatPrecision decimal places
    acos: function(val, floatPrecision) {
        return +(this.deg(Math.acos(val)).toFixed(floatPrecision));
    },
    sin: function(deg) {
        return Math.sin(this.rad(deg));
    },
    // arc-sine in degrees, rounded to floatPrecision decimal places
    asin: function(val, floatPrecision) {
        return +(this.deg(Math.asin(val)).toFixed(floatPrecision));
    },
    tan: function(deg) {
        return Math.tan(this.rad(deg));
    },
    // arc-tangent in degrees, rounded to floatPrecision decimal places
    atan: function(val, floatPrecision) {
        return +(this.deg(Math.atan(val)).toFixed(floatPrecision));
    }
};
/**
 * Decompose matrix into simple transforms. See
 * https://frederic-wang.fr/decomposition-of-2d-transform-matrices.html
 *
 * @param {Object} transform matrix transform object ({name: 'matrix', data})
 * @param {Object} params plugin params (floatPrecision, transformPrecision)
 * @return {Object|Array} transforms array, or the original transform
 *                        object when the matrix cannot be decomposed into
 *                        a short translate/rotate/scale/skew sequence
 */
exports.matrixToTransform = function(transform, params) {
    var floatPrecision = params.floatPrecision,
        data = transform.data,
        transforms = [],
        sx = +Math.hypot(data[0], data[1]).toFixed(params.transformPrecision),
        sy = +((data[0] * data[3] - data[1] * data[2]) / sx).toFixed(params.transformPrecision),
        colsSum = data[0] * data[2] + data[1] * data[3],
        rowsSum = data[0] * data[1] + data[2] * data[3],
        scaleBefore = rowsSum != 0 || sx == sy;
    // [..., ..., ..., ..., tx, ty] → translate(tx, ty)
    if (data[4] || data[5]) {
        transforms.push({ name: 'translate', data: data.slice(4, data[5] ? 6 : 5) });
    }
    // [sx, 0, tan(a)·sy, sy, 0, 0] → skewX(a)·scale(sx, sy)
    if (!data[1] && data[2]) {
        transforms.push({ name: 'skewX', data: [mth.atan(data[2] / sy, floatPrecision)] });
        // [sx, sx·tan(a), 0, sy, 0, 0] → skewY(a)·scale(sx, sy)
    } else if (data[1] && !data[2]) {
        transforms.push({ name: 'skewY', data: [mth.atan(data[1] / data[0], floatPrecision)] });
        sx = data[0];
        sy = data[3];
        // [sx·cos(a), sx·sin(a), sy·-sin(a), sy·cos(a), x, y] → rotate(a[, cx, cy])·(scale or skewX) or
        // [sx·cos(a), sy·sin(a), sx·-sin(a), sy·cos(a), x, y] → scale(sx, sy)·rotate(a[, cx, cy]) (if !scaleBefore)
    } else if (!colsSum || (sx == 1 && sy == 1) || !scaleBefore) {
        if (!scaleBefore) {
            sx = (data[0] < 0 ? -1 : 1) * Math.hypot(data[0], data[2]);
            sy = (data[3] < 0 ? -1 : 1) * Math.hypot(data[1], data[3]);
            transforms.push({ name: 'scale', data: [sx, sy] });
        }
        // Clamp to [-1, 1] so acos never receives an out-of-range value
        // caused by rounding.
        var angle = Math.min(Math.max(-1, data[0] / sx), 1),
            rotate = [mth.acos(angle, floatPrecision) * ((scaleBefore ? 1 : sy) * data[1] < 0 ? -1 : 1)];
        if (rotate[0]) transforms.push({ name: 'rotate', data: rotate });
        if (rowsSum && colsSum) transforms.push({
            name: 'skewX',
            data: [mth.atan(colsSum / (sx * sx), floatPrecision)]
        });
        // rotate(a, cx, cy) can consume translate() within optional arguments cx, cy (rotation point)
        if (rotate[0] && (data[4] || data[5])) {
            transforms.shift();
            var cos = data[0] / sx,
                sin = data[1] / (scaleBefore ? sx : sy),
                x = data[4] * (scaleBefore || sy),
                y = data[5] * (scaleBefore || sx),
                denom = (Math.pow(1 - cos, 2) + Math.pow(sin, 2)) * (scaleBefore || sx * sy);
            rotate.push(((1 - cos) * x - sin * y) / denom);
            rotate.push(((1 - cos) * y + sin * x) / denom);
        }
        // Too many transformations, return original matrix if it isn't just a scale/translate
    } else if (data[1] || data[2]) {
        return transform;
    }
    if (scaleBefore && (sx != 1 || sy != 1) || !transforms.length) transforms.push({
        name: 'scale',
        data: sx == sy ? [sx] : [sx, sy]
    });
    return transforms;
};
/**
 * Convert a single transform object to its matrix representation
 * ([a, b, c, d, e, f] column-major 2D affine form).
 *
 * @param {Object} transform transform object ({name, data})
 * @return {Array} matrix data (undefined for an unrecognised name,
 *                 matching the original behaviour)
 */
function transformToMatrix(transform) {
    var data = transform.data;
    switch (transform.name) {
        case 'matrix':
            return data;
        case 'translate':
            // [1, 0, 0, 1, tx, ty]
            return [1, 0, 0, 1, data[0], data[1] || 0];
        case 'scale':
            // [sx, 0, 0, sy, 0, 0]; a single argument means uniform scale
            return [data[0], 0, 0, data[1] || data[0], 0, 0];
        case 'rotate':
            // [cos(a), sin(a), -sin(a), cos(a), x, y] about point (cx, cy)
            var cos = mth.cos(data[0]),
                sin = mth.sin(data[0]),
                cx = data[1] || 0,
                cy = data[2] || 0;
            return [cos, sin, -sin, cos, (1 - cos) * cx + sin * cy, (1 - cos) * cy - sin * cx];
        case 'skewX':
            // [1, 0, tan(a), 1, 0, 0]
            return [1, 0, mth.tan(data[0]), 1, 0, 0];
        case 'skewY':
            // [1, tan(a), 0, 1, 0, 0]
            return [1, mth.tan(data[0]), 0, 1, 0, 0];
    }
    return undefined;
}
/**
 * Applies transformation to an arc. To do so, we represent ellipse as a matrix, multiply it
 * by the transformation matrix and use a singular value decomposition to represent in a form
 * rotate(θ)·scale(a b)·rotate(φ). This gives us new ellipse params a, b and θ.
 * SVD is being done with the formulae provided by Wolffram|Alpha (svd {{m0, m2}, {m1, m3}})
 *
 * @param {Array} arc [a, b, rotation in deg, large-arc, sweep, dx, dy]
 * @param {Array} transform transformation matrix
 * @return {Array} arc transformed input arc (modified in place)
 */
exports.transformArc = function(arc, transform) {
    var a = arc[0],
        b = arc[1],
        rot = arc[2] * Math.PI / 180,
        cos = Math.cos(rot),
        sin = Math.sin(rot),
        // How far the endpoint lies outside the ellipse; > 1 means the
        // radii are too small and must be scaled up to reach it.
        h = Math.pow(arc[5] * cos + arc[6] * sin, 2) / (4 * a * a) +
            Math.pow(arc[6] * cos - arc[5] * sin, 2) / (4 * b * b);
    if (h > 1) {
        h = Math.sqrt(h);
        a *= h;
        b *= h;
    }
    var ellipse = [a * cos, a * sin, -b * sin, b * cos, 0, 0],
        m = multiplyTransformMatrices(transform, ellipse),
        // Decompose the new ellipse matrix
        lastCol = m[2] * m[2] + m[3] * m[3],
        squareSum = m[0] * m[0] + m[1] * m[1] + lastCol,
        root = Math.hypot(m[0] - m[3], m[1] + m[2]) * Math.hypot(m[0] + m[3], m[1] - m[2]);
    if (!root) { // circle
        arc[0] = arc[1] = Math.sqrt(squareSum / 2);
        arc[2] = 0;
    } else {
        var majorAxisSqr = (squareSum + root) / 2,
            minorAxisSqr = (squareSum - root) / 2,
            major = Math.abs(majorAxisSqr - lastCol) > 1e-6,
            sub = (major ? majorAxisSqr : minorAxisSqr) - lastCol,
            rowsSum = m[0] * m[2] + m[1] * m[3],
            term1 = m[0] * sub + m[2] * rowsSum,
            term2 = m[1] * sub + m[3] * rowsSum;
        arc[0] = Math.sqrt(majorAxisSqr);
        arc[1] = Math.sqrt(minorAxisSqr);
        arc[2] = ((major ? term2 < 0 : term1 > 0) ? -1 : 1) *
            Math.acos((major ? term1 : term2) / Math.hypot(term1, term2)) * 180 / Math.PI;
    }
    if ((transform[0] < 0) !== (transform[3] < 0)) {
        // Flip the sweep flag if coordinates are being flipped horizontally XOR vertically
        arc[4] = 1 - arc[4];
    }
    return arc;
};
/**
 * Multiply two 3x2 affine transform matrices (result applies b, then a).
 *
 * @param {Array} a matrix A data [a0, a1, a2, a3, a4, a5]
 * @param {Array} b matrix B data
 * @return {Array} result matrix data
 */
function multiplyTransformMatrices(a, b) {
    var out = [];
    // Linear part: each column of b transformed by the linear part of a.
    for (var col = 0; col < 2; col++) {
        out.push(a[0] * b[col * 2] + a[2] * b[col * 2 + 1]);
        out.push(a[1] * b[col * 2] + a[3] * b[col * 2 + 1]);
    }
    // Translation part: a's linear part applied to b's translation,
    // plus a's own translation.
    out.push(a[0] * b[4] + a[2] * b[5] + a[4]);
    out.push(a[1] * b[4] + a[3] * b[5] + a[5]);
    return out;
}
|
import React from "react";
function ListItem(props) {
return (
<li>
<img src={props.image} alt={props.alt} />
<h3>{props.heading}</h3>
<p>{props.content}</p>
</li>
);
}
export default ListItem;
|
#!/usr/bin/python
# Python 2 script (uses xrange and the print statement): enumerates facets of
# the dominant of the perfect-matching polytope of K_10 via the IPO library.
import sys
import IPO
import random
# Objective-generation strategy: exactly one of the options below is active.
#objectiveType = 'weights'
#objectiveType = 'random'
#objectiveType = '0/1-random'
objectiveType = '0/-1-random'
objectiveZeroProbability = 0.5
N = 10000  # number of random objective directions to try
V = range(10)  # vertices of the complete graph
E = [ (i,j) for i in xrange(len(V)) for j in xrange(i+1,len(V)) ]  # all edges i < j
# Write LP.
fileName = '/tmp/ipo.lp'
lpData = '''
minimize cost: -x#0#1
s.t.
'''
# One degree constraint per vertex: edges incident to v sum to 1 (perfect matching).
for v in V:
  lpData += ' ' + ' + '.join([ 'x#%d#%d' % (i,j) for (i,j) in E if i == v or j == v ]) + ' == 1\n'
lpData += 'generals\n'
lpData += 'binaries\n'
for e in E:
  lpData += ' x#%d#%d\n' % e
lpData += 'end\n'
lpFile = open(fileName, 'w')
lpFile.write(lpData)
lpFile.close()
# Build the oracle chain: SCIP solves the matching IP; DominantOracle lifts it
# to the dominant polyhedron.
mils = IPO.MixedIntegerLinearSet(fileName)
matchingOracle = IPO.SCIPOracle(fileName, mils)
dominantOracle = IPO.DominantOracle("Dom", matchingOracle)
oracle = dominantOracle
P = IPO.Polyhedron(oracle)
cons = mils.getConstraints()
lp = IPO.LinearProgram(P.space)
dim, (points, rays), eqns = IPO.affineHull(oracle)
print 'Generating facets for %d directions.' % (N)
for i in xrange(N):
  # print('Random objective #%d' % (i+1))
  objective = [None] * lp.numVariables
  if objectiveType == 'weights':
    for c in xrange(lp.numVariables):
      # NOTE(review): `weights` is never defined in this script; selecting the
      # 'weights' objective type would raise NameError — confirm before use.
      objective[c] = weights[c] + random.random()
  elif objectiveType == 'random':
    for c in xrange(lp.numVariables):
      objective[c] = random.random() - 0.5
  elif objectiveType == '0/1-random':
    for c in xrange(lp.numVariables):
      rnd = random.random()
      if rnd <= objectiveZeroProbability:
        objective[c] = 0
      else:
        objective[c] = 1
  elif objectiveType == '0/-1-random':
    for c in xrange(lp.numVariables):
      rnd = random.random()
      if rnd <= objectiveZeroProbability:
        objective[c] = 0
      else:
        objective[c] = -1
  elif objectiveType == '-1/0/1-random':
    for c in xrange(lp.numVariables):
      if random.random() <= objectiveZeroProbability:
        objective[c] = 0
      elif random.random() > 0.5:
        objective[c] = 1
      else:
        objective[c] = -1
  else:
    assert False
  lp.changeObjective(objective)
  # Cutting-plane loop: solve the LP relaxation, separate the optimum/ray
  # against P, add the violated facet, repeat until nothing separates.
  while True:
    # sys.stdout.write('.')
    # sys.stdout.flush()
    # print('Solving LP with %d rows.' % (lp.numRows))
    (status, vector, value) = lp.solve()
    if status == IPO.LinearProgram.OPTIMAL:
      # print('LP-optimum: %s' % (vector))
      con = P.separatePoint(vector)
      if con is None:
        break
      print('Separated facet: %s' % (con))
      lp.addConstraint(con)
    elif status == IPO.LinearProgram.UNBOUNDED:
      # print('LP-ray: %s' % (vector))
      con = P.separateRay(vector)
      if con is None:
        break
      print('Separated facet: %s' % (con))
      lp.addConstraint(con)
    else:
      raise Exception('Unexpected status %d' % (status))
print
|
/*
* GridGain Community Edition Licensing
* Copyright 2019 GridGain Systems, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License") modified with Commons Clause
* Restriction; you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*
* Commons Clause Restriction
*
* The Software is provided to you by the Licensor under the License, as defined below, subject to
* the following condition.
*
* Without limiting other conditions in the License, the grant of rights under the License will not
* include, and the License does not grant to you, the right to Sell the Software.
* For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you
* under the License to provide to third parties, for a fee or other consideration (including without
* limitation fees for hosting or consulting/ support services related to the Software), a product or
* service whose value derives, entirely or substantially, from the functionality of the Software.
* Any license notice or attribution required by the License must also include this Commons Clause
* License Condition notice.
*
* For purposes of the clause above, the “Licensor” is Copyright 2019 GridGain Systems, Inc.,
* the “License” is the Apache License, Version 2.0, and the Software is the GridGain Community
* Edition software provided with this notice.
*/
#ifndef _IGNITE_COMMON_CONCURRENT_OS
#define _IGNITE_COMMON_CONCURRENT_OS
#include <stdint.h>
#include <cassert>
#include <map>
#include <windows.h>
#include "ignite/common/common.h"
namespace ignite
{
namespace common
{
namespace concurrent
{
            /**
             * Static class to manage memory visibility semantics.
             */
            class IGNITE_IMPORT_EXPORT Memory
            {
            public:
                /**
                 * Full memory fence. Implementation is platform-specific
                 * (defined in the corresponding source file).
                 */
                static void Fence();
            };
            /**
             * Critical section: lightweight mutual-exclusion lock wrapping the
             * Win32 CRITICAL_SECTION primitive.
             */
            class IGNITE_IMPORT_EXPORT CriticalSection
            {
                // ConditionVariable needs direct access to hnd for
                // SleepConditionVariableCS().
                friend class ConditionVariable;
            public:
                /**
                 * Constructor.
                 */
                CriticalSection();
                /**
                 * Destructor.
                 */
                ~CriticalSection();
                /**
                 * Enter critical section.
                 */
                void Enter();
                /**
                 * Leave critical section.
                 */
                void Leave();
            private:
                /** Handle. */
                CRITICAL_SECTION hnd;
                IGNITE_NO_COPY_ASSIGNMENT(CriticalSection)
            };
            /**
             * Special latch with count = 1: waiters block in Await() until
             * CountDown() is called once.
             */
            class IGNITE_IMPORT_EXPORT SingleLatch
            {
            public:
                /**
                 * Constructor.
                 */
                SingleLatch();
                /**
                 * Destructor.
                 */
                ~SingleLatch();
                /**
                 * Perform the countdown, releasing waiters.
                 */
                void CountDown();
                /**
                 * Await the countdown.
                 */
                void Await();
            private:
                /** Handle. */
                HANDLE hnd;
                IGNITE_NO_COPY_ASSIGNMENT(SingleLatch)
            };
            /**
             * Primitives for atomic access.
             */
            class Atomics
            {
            public:
                /**
                 * Update the 32-bit integer value if it is equal to expected value.
                 *
                 * @param ptr Pointer.
                 * @param expVal Expected value.
                 * @param newVal New value.
                 * @return True if update occurred as a result of this call, false otherwise.
                 */
                static bool CompareAndSet32(int32_t* ptr, int32_t expVal, int32_t newVal);
                /**
                 * Update the 32-bit integer value if it is equal to expected value.
                 *
                 * @param ptr Pointer.
                 * @param expVal Expected value.
                 * @param newVal New value.
                 * @return Value that was observed during the CAS attempt.
                 */
                static int32_t CompareAndSet32Val(int32_t* ptr, int32_t expVal, int32_t newVal);
                /**
                 * Increment 32-bit integer and return new value.
                 *
                 * @param ptr Pointer.
                 * @return Value after increment.
                 */
                static int32_t IncrementAndGet32(int32_t* ptr);
                /**
                 * Decrement 32-bit integer and return new value.
                 *
                 * @param ptr Pointer.
                 * @return Value after decrement.
                 */
                static int32_t DecrementAndGet32(int32_t* ptr);
                /**
                 * Update the 64-bit integer value if it is equal to expected value.
                 *
                 * @param ptr Pointer.
                 * @param expVal Expected value.
                 * @param newVal New value.
                 * @return True if update occurred as a result of this call, false otherwise.
                 */
                static bool CompareAndSet64(int64_t* ptr, int64_t expVal, int64_t newVal);
                /**
                 * Update the 64-bit integer value if it is equal to expected value.
                 *
                 * @param ptr Pointer.
                 * @param expVal Expected value.
                 * @param newVal New value.
                 * @return Value that was observed during the CAS attempt.
                 */
                static int64_t CompareAndSet64Val(int64_t* ptr, int64_t expVal, int64_t newVal);
                /**
                 * Increment 64-bit integer and return new value.
                 *
                 * @param ptr Pointer.
                 * @return Value after increment.
                 */
                static int64_t IncrementAndGet64(int64_t* ptr);
                /**
                 * Decrement 64-bit integer and return new value.
                 *
                 * @param ptr Pointer.
                 * @return Value after decrement.
                 */
                static int64_t DecrementAndGet64(int64_t* ptr);
            };
            /**
             * Thread-local entry: type-erased base for values stored in the
             * per-thread map (see ThreadLocal).
             */
            class IGNITE_IMPORT_EXPORT ThreadLocalEntry
            {
            public:
                /**
                 * Virtual destructor to allow for correct typed entries cleanup.
                 */
                virtual ~ThreadLocalEntry()
                {
                    // No-op.
                }
            };
            /**
             * Typed thread-local entry: holds a copy of a single value of type T.
             */
            template<typename T>
            class IGNITE_IMPORT_EXPORT ThreadLocalTypedEntry : public ThreadLocalEntry
            {
            public:
                /**
                 * Constructor.
                 *
                 * @param val Value.
                 */
                ThreadLocalTypedEntry(T val) : val(val)
                {
                    // No-op.
                }
                /**
                 * Destructor.
                 */
                ~ThreadLocalTypedEntry()
                {
                    // No-op.
                }
                /**
                 * Get value.
                 *
                 * @return Value.
                 */
                T Get()
                {
                    return val;
                }
            private:
                /** Value. */
                T val;
            };
/**
* Thread-local abstraction.
*/
class IGNITE_IMPORT_EXPORT ThreadLocal
{
public:
/**
* Allocate thread-local index. Invoked once on DLL process attach.
*
* @return True if allocation was successful.
*/
static bool OnProcessAttach();
/**
* Release thread-local entry. Invoked on DLL thread detach.
*/
static void OnThreadDetach();
/**
* Release thread-local index. Invoked once on DLL process detach.
*/
static void OnProcessDetach();
/**
* Get next available index to be used in thread-local storage.
*
* @return Index.
*/
static int32_t NextIndex();
/**
* Get value by index.
*
* @param idx Index.
* @return Value associated with the index or NULL.
*/
template<typename T>
static T Get(int32_t idx)
{
void* winVal = Get0();
if (winVal)
{
std::map<int32_t, ThreadLocalEntry*>* map =
static_cast<std::map<int32_t, ThreadLocalEntry*>*>(winVal);
ThreadLocalTypedEntry<T>* entry = static_cast<ThreadLocalTypedEntry<T>*>((*map)[idx]);
if (entry)
return entry->Get();
}
return T();
}
/**
* Set value at the given index.
*
* @param idx Index.
* @param val Value to be associated with the index.
*/
template<typename T>
static void Set(int32_t idx, const T& val)
{
void* winVal = Get0();
if (winVal)
{
std::map<int32_t, ThreadLocalEntry*>* map =
static_cast<std::map<int32_t, ThreadLocalEntry*>*>(winVal);
ThreadLocalEntry* appVal = (*map)[idx];
if (appVal)
delete appVal;
(*map)[idx] = new ThreadLocalTypedEntry<T>(val);
}
else
{
std::map<int32_t, ThreadLocalEntry*>* map = new std::map<int32_t, ThreadLocalEntry*>();
Set0(map);
(*map)[idx] = new ThreadLocalTypedEntry<T>(val);
}
}
/**
* Remove value at the given index.
*
* @param idx Index.
*/
static void Remove(int32_t idx);
private:
/**
* Internal get routine.
*
* @param Associated value.
*/
static void* Get0();
/**
* Internal set routine.
*
* @param ptr Pointer.
*/
static void Set0(void* ptr);
/**
* Internal thread-local map clear routine.
*
* @param mapPtr Pointer to map.
*/
static void Clear0(void* mapPtr);
};
            /**
             * Thread-local instance. Simplifies API avoiding direct index allocations:
             * each instance grabs its own index from ThreadLocal at construction.
             */
            template<typename T>
            class IGNITE_IMPORT_EXPORT ThreadLocalInstance
            {
            public:
                /**
                 * Constructor. Allocates a fresh thread-local index.
                 */
                ThreadLocalInstance() : idx(ThreadLocal::NextIndex())
                {
                    // No-op.
                }
                /**
                 * Get value stored for the calling thread.
                 *
                 * @return Value.
                 */
                T Get()
                {
                    return ThreadLocal::Get<T>(idx);
                }
                /**
                 * Set value for the calling thread.
                 *
                 * @param val Value.
                 */
                void Set(const T& val)
                {
                    ThreadLocal::Set<T>(idx, val);
                }
                /**
                 * Remove the calling thread's value.
                 */
                void Remove()
                {
                    ThreadLocal::Remove(idx);
                }
            private:
                /** Index. */
                int32_t idx;
            };
            /**
             * Cross-platform wrapper for Condition Variable synchronization
             * primitive concept. This is the Win32 implementation
             * (CONDITION_VARIABLE).
             */
            class ConditionVariable
            {
            public:
                /**
                 * Constructor.
                 */
                ConditionVariable()
                {
                    InitializeConditionVariable(&cond);
                }
                /**
                 * Destructor.
                 */
                ~ConditionVariable()
                {
                    // No-op.
                }
                /**
                 * Wait for Condition Variable to be notified.
                 *
                 * @param cs Critical section in which to wait (must be held by the caller).
                 */
                void Wait(CriticalSection& cs)
                {
                    SleepConditionVariableCS(&cond, &cs.hnd, INFINITE);
                }
                /**
                 * Wait for Condition Variable to be notified for specified time.
                 *
                 * @param cs Critical section in which to wait (must be held by the caller).
                 * @param msTimeout Timeout in milliseconds.
                 * @return True if the object has been notified and false in case of timeout.
                 */
                bool WaitFor(CriticalSection& cs, int32_t msTimeout)
                {
                    BOOL notified = SleepConditionVariableCS(&cond, &cs.hnd, msTimeout);
                    return notified != FALSE;
                }
                /**
                 * Notify single thread waiting for the condition variable.
                 */
                void NotifyOne()
                {
                    WakeConditionVariable(&cond);
                }
                /**
                 * Notify all threads that are waiting on the variable.
                 */
                void NotifyAll()
                {
                    WakeAllConditionVariable(&cond);
                }
            private:
                IGNITE_NO_COPY_ASSIGNMENT(ConditionVariable);
                /** OS-specific type. */
                CONDITION_VARIABLE cond;
            };
            /**
             * Manually triggered event (wraps a Win32 manual-reset event).
             * Once triggered it stays in passing state until manually reset.
             */
            class ManualEvent
            {
            public:
                /**
                 * Constructs manual event.
                 * Initial state is untriggered.
                 */
                ManualEvent()
                {
                    // TRUE -> manual-reset, FALSE -> initially non-signaled.
                    handle = CreateEvent(NULL, TRUE, FALSE, NULL);
                    assert(handle != NULL);
                }
                /**
                 * Destructor.
                 */
                ~ManualEvent()
                {
                    CloseHandle(handle);
                }
                /**
                 * Sets event into triggered state.
                 */
                void Set()
                {
                    BOOL success = SetEvent(handle);
                    assert(success);
                }
                /**
                 * Resets event into non-triggered state.
                 */
                void Reset()
                {
                    BOOL success = ResetEvent(handle);
                    assert(success);
                }
                /**
                 * Wait for event to be triggered.
                 */
                void Wait()
                {
                    DWORD res = WaitForSingleObject(handle, INFINITE);
                    assert(res == WAIT_OBJECT_0);
                }
                /**
                 * Wait for event to be triggered for specified time.
                 *
                 * @param msTimeout Timeout in milliseconds.
                 * @return True if the object has been triggered and false in case of timeout.
                 */
                bool WaitFor(int32_t msTimeout)
                {
                    DWORD res = WaitForSingleObject(handle, static_cast<DWORD>(msTimeout));
                    assert(res == WAIT_OBJECT_0 || res == WAIT_TIMEOUT);
                    return res == WAIT_OBJECT_0;
                }
            private:
                IGNITE_NO_COPY_ASSIGNMENT(ManualEvent);
                /** Event handle. */
                HANDLE handle;
            };
}
}
}
#endif //_IGNITE_COMMON_CONCURRENT_OS
|
/*
* This file is part of the MicroPython project, http://micropython.org/
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Scott Shawcroft
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef MICROPY_INCLUDED_ESP32S2_COMMON_HAL_MICROCONTROLLER_PIN_H
#define MICROPY_INCLUDED_ESP32S2_COMMON_HAL_MICROCONTROLLER_PIN_H
#include "py/mphal.h"
#include "peripherals/pins.h"
// Flags tracking whether the APA102 status-LED pins are currently in use.
#ifdef MICROPY_HW_APA102_MOSI
extern bool apa102_sck_in_use;
extern bool apa102_mosi_in_use;
#endif
// Flag tracking whether the NeoPixel status-LED pin is currently in use.
#ifdef MICROPY_HW_NEOPIXEL
extern bool neopixel_in_use;
#endif
// Reset every pin back to its default, unclaimed state.
void reset_all_pins(void);
// reset_pin_number takes the pin number instead of the pointer so that objects don't
// need to store a full pointer.
void reset_pin_number(gpio_num_t pin_number);
// Reset a single pin given its pin object.
void common_hal_reset_pin(const mcu_pin_obj_t* pin);
// Mark a pin as in use so other code cannot claim it.
void claim_pin(const mcu_pin_obj_t* pin);
// Whether the given pin number is currently unclaimed.
bool pin_number_is_free(gpio_num_t pin_number);
// Exclude the pin from future resets (presumably reset_all_pins) — confirm in implementation.
void never_reset_pin_number(gpio_num_t pin_number);
#endif // MICROPY_INCLUDED_ESP32S2_COMMON_HAL_MICROCONTROLLER_PIN_H
|
import threading
from typing import Callable
class Animation(threading.Thread):
    """Thread wrapper around a screen-drawing callback.

    A single class-level lock serializes drawing across all ``Animation``
    instances; overlapping draw requests are silently dropped rather than
    queued.
    """
    # Shared by all instances: only one draw callback may run at a time.
    LOCK = threading.Lock()

    def __init__(self, draw_function: Callable[[], None]):
        super().__init__()
        # The zero-argument callback that performs the actual drawing.
        self.__draw_function: Callable[[], None] = draw_function

    def draw_screen(self) -> None:
        """Invoke the draw callback unless another draw is in progress.

        Uses a non-blocking acquire so callers never stall. The lock is
        released in a ``finally`` block so that a callback that raises
        cannot permanently block all future draws (the original released
        the lock only on the success path).
        """
        if self.LOCK.acquire(blocking=False):
            try:
                self.__draw_function()
            finally:
                self.LOCK.release()

    def run(self) -> None:
        # No thread body here — presumably subclasses or callers drive
        # draw_screen() directly; confirm intent.
        pass
|
# This file is where you keep secret settings, passwords, and tokens!
# If you put them in the code you risk committing that info or sharing it
# Replace each 'CHANGE ME' value before use; consuming code reads these
# exact keys from the `secrets` dict.
secrets = {
    'ssid' : 'CHANGE ME',  # WiFi network name
    'password' : 'CHANGE ME',  # WiFi password
    # leave blank or use timezone from # http://worldtimeapi.org/timezones
    'timezone' : '',
    'aio_username' : 'CHANGE ME',  # presumably Adafruit IO username — confirm
    'aio_key' : 'CHANGE ME',  # presumably Adafruit IO API key — confirm
    }
|
from typing import Callable, Generator, Tuple
def get_run_generator(test_data: Tuple[str, ...]) -> Callable[[], str]:
    """Return a zero-argument callable yielding successive items of ``test_data``.

    Each call to the returned function produces the next element; once the
    data is exhausted, further calls raise ``StopIteration``.

    Note: the annotation is ``Tuple[str, ...]`` (variable-length tuple); the
    original ``Tuple[str]`` incorrectly declared a one-element tuple.

    :param test_data: lines to be served one per call.
    :return: callable producing the next line on each invocation.
    """
    test_data_gen: Generator[str, None, None] = (line for line in test_data)
    def generate_input() -> str:
        # Advance the shared generator captured in this closure.
        return next(test_data_gen)
    return generate_input
|
from bangtal.game import EventID
from bangtal.game import MouseAction
from bangtal.game import GameOption
from bangtal.game import GameServer
from bangtal.scene import Scene
from bangtal.object import Object
from bangtal.object import ObjectManager
from bangtal.timer import Timer
from bangtal.sound import Sound
class Game:
    # NOTE(review): empty placeholder — not referenced by the functions below;
    # confirm whether it is still needed.
    pass
def startGame(scene):
    """Start the game at the given scene (forwards scene.ID to the GameServer singleton)."""
    GameServer.instance().startGame(scene.ID)
def endGame():
    """End the current game."""
    GameServer.instance().endGame()
def showTimer(timer):
    """Show the given timer (forwards timer.ID)."""
    GameServer.instance().showTimer(timer.ID)
def hideTimer():
    """Hide the currently shown timer."""
    GameServer.instance().hideTimer()
def showMessage(message):
    """Display a message to the player."""
    GameServer.instance().showMessage(message)
def showKeypad(password, object):
    """Show a keypad bound to `object` expecting `password`."""
    GameServer.instance().showKeypad(password, object.ID)
def showImageViewer(file):
    """Open the image viewer for the given file."""
    GameServer.instance().showImageViewer(file)
def showAudioPlayer(file):
    """Open the audio player for the given file."""
    GameServer.instance().showAudioPlayer(file)
def showVideoPlayer(file):
    """Open the video player for the given file."""
    GameServer.instance().showVideoPlayer(file)
def setGameOption(option, value):
    """Set a game option to the given value."""
    GameServer.instance().setGameOption(option, value)
def getGameOption(option):
    """Return the current value of a game option."""
    return GameServer.instance().getGameOption(option)
|
# -*- coding: utf-8 -*-
# @Time : 19-8-28 上午9:59
# @Author : Redtree
# @File : s_game_play_stage1.py
# @Desc : 选择移动对象阶段
from logic import loader
from data import player_runtime
from data import color_rgb
import pygame
import random
def dojob(x,y,is_mouse_down,cheros,keys):
    """Stage 1: choose which character to move.

    :param x: mouse x coordinate (pixels)
    :param y: mouse y coordinate (pixels)
    :param is_mouse_down: whether the mouse button was pressed this frame
    :param cheros: NOTE(review): this parameter is immediately overwritten
        below from player_runtime.INFO — the argument is effectively unused;
        confirm intent.
    :param keys: dict of key states (reads keys['tab'])
    """
    # The die result is shown in the lower-left corner (original comment).
    csz = pygame.transform.scale(loader.SZ[player_runtime.INFO['sz_num']], (60, 60))
    loader.screen.blit(csz, (770, 600))
    # Reset the character control panel.
    player_runtime.INFO['ctb_codes'] = []
    # If the roll is not 6 and the faction has no deployed character, skip the turn.
    # For testing, everyone is currently allowed to "take off".
    cheros = player_runtime.INFO['zdata'][player_runtime.INFO['turn']]
    if not player_runtime.INFO['sz_num'] in [4, 5]:
        in_war_flag = False
        for cr in cheros:
            if cr['in_war'] == True and cr['gowin']==False:
                # Zombies (code 7) cannot act during the day.
                if player_runtime.INFO['round'] % 4 in [1, 2] and cr['code']==7:
                    pass
                else:
                    player_runtime.INFO['ctb_codes'].append(cr['code'])
                    in_war_flag = True
        if in_war_flag == False:
            mention_words = ['提示', '无法行动,需掷出点数','大于5']
            do_mention(mention_words)
            # Player: skip via manual click. AI: skip after ~1 second.
            if player_runtime.INFO['pa_turn'][player_runtime.INFO['turn']] == 0:
                if player_runtime.INFO['is_mention'] == True and is_mouse_down == True:
                    player_runtime.INFO['is_mention'] = False
                    player_runtime.INFO['ctb_codes']=[]
                    # Jump to settlement step b.
                    player_runtime.INFO['stage'] = 5
            else:
                # Actual AI action timing still needs tuning/feedback.
                if player_runtime.AITP['st11'] < 60:
                    player_runtime.AITP['st11'] = player_runtime.AITP['st11'] + 1
                else:
                    player_runtime.INFO['is_mention'] = False
                    player_runtime.INFO['ctb_codes'] = []
                    # Jump to settlement step b.
                    player_runtime.AITP['st11'] = 0
                    player_runtime.INFO['stage'] = 5
        else:
            # Normal case: render the other deployed characters in the panel.
            control_heros_index = 0
            for code in player_runtime.INFO['ctb_codes']:
                # NOTE(review): `chr` shadows the builtin of the same name.
                chr = pygame.transform.scale(loader.RMS[code][0], (90, 85))
                if control_heros_index == 0:
                    loader.screen.blit(chr, (860, 510))
                elif control_heros_index == 1:
                    loader.screen.blit(chr, (950, 510))
                elif control_heros_index == 2:
                    loader.screen.blit(chr, (860, 595))
                elif control_heros_index == 3:
                    loader.screen.blit(chr, (950, 595))
                control_heros_index = control_heros_index + 1
    else:
        # Choose the character to move.
        control_heros_index = 0
        for c in cheros:
            if c['gowin']==True:
                pass
            elif player_runtime.INFO['round'] % 4 in [1, 2] and c['code']==7 :
                pass
            else:
                player_runtime.INFO['ctb_codes'].append(c['code'])
                chr = pygame.transform.scale(loader.RMS[c['code']][0], (90, 85))
                if control_heros_index == 0:
                    loader.screen.blit(chr, (860, 510))
                elif control_heros_index == 1:
                    loader.screen.blit(chr, (950, 510))
                elif control_heros_index == 2:
                    loader.screen.blit(chr, (860, 595))
                elif control_heros_index == 3:
                    loader.screen.blit(chr, (950, 595))
                control_heros_index = control_heros_index + 1
        # Player: pick a hero by hand. AI: pick at random.
        if player_runtime.INFO['pa_turn'][player_runtime.INFO['turn']] == 0:
            # Hero menu interaction — heroes can only be selected in stage 1.
            # 1
            if x >= 860 and x < 950 and y >= 510 and y < 595 and len(player_runtime.INFO['ctb_codes'])>0:
                loader.screen.blit(loader.SELECT_MENU, (860, 510))
                if is_mouse_down == True:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][0]
                    player_runtime.INFO['stage'] = 2
            # 2
            elif x >= 950 and x < 1040 and y >= 510 and y < 595 and len(player_runtime.INFO['ctb_codes'])>1:
                loader.screen.blit(loader.SELECT_MENU, (950, 510))
                if is_mouse_down == True:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][1]
                    player_runtime.INFO['stage'] = 2
            # 3
            elif x >= 860 and x < 950 and y >= 595 and y < 680 and len(player_runtime.INFO['ctb_codes'])>2:
                loader.screen.blit(loader.SELECT_MENU, (860, 595))
                if is_mouse_down == True:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][2]
                    player_runtime.INFO['stage'] = 2
            # 4
            elif x >= 950 and x < 1040 and y >= 595 and y < 680 and len(player_runtime.INFO['ctb_codes'])>3:
                loader.screen.blit(loader.SELECT_MENU, (950, 595))
                if is_mouse_down == True:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][3]
                    player_runtime.INFO['stage'] = 2
            if keys['tab'] == 1:
                player_runtime.INFO['inzhankuang'] = True
        else:
            clen = len(player_runtime.INFO['ctb_codes'])
            if clen<1:
                player_runtime.INFO['is_mention'] = False
                player_runtime.INFO['ctb_codes'] = []
                # Jump to settlement step b.
                player_runtime.INFO['stage'] = 5
            else:
                '''
                选择移动的角色,目前是随机的,没有策略,可在此处加入策略模块
                '''
                # AI picks the character at random — no strategy yet; a
                # strategy module could be plugged in here.
                ai_choice = random.randint(1,len(player_runtime.INFO['ctb_codes']))
                if ai_choice==1:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][0]
                    player_runtime.INFO['stage'] = 2
                elif ai_choice==2:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][1]
                    player_runtime.INFO['stage'] = 2
                elif ai_choice == 3:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][2]
                    player_runtime.INFO['stage'] = 2
                elif ai_choice == 4:
                    player_runtime.INFO['moving_code'] = player_runtime.INFO['ctb_codes'][3]
                    player_runtime.INFO['stage'] = 2
# Create a popup hint.
def do_mention(text):
    """Render a hint box and the given lines of text, then set the
    `is_mention` flag so the caller knows a hint is being shown.

    :param text: iterable of strings, drawn one per line below the box title.
    """
    mx = 300
    my = 200
    loader.screen.blit(loader.MENTION,(mx,my))
    t_index = 1
    for t in text:
        mtext = loader.GAME_ROUND_FONT.render(t, True,
                                              color_rgb.BLACK,
                                              None)
        loader.screen.blit(mtext, (mx+50, my+50*t_index))
        t_index = t_index+1
    player_runtime.INFO['is_mention']=True
|
"use strict";
/**
 * Babel object-spread helper: collect an object's own enumerable string keys
 * plus its own symbol keys (optionally restricted to enumerable symbols).
 *
 * @param {Object} object source object
 * @param {boolean} enumerableOnly keep only enumerable symbol keys
 * @return {Array} own keys — strings first, then symbols
 */
function ownKeys(object, enumerableOnly) {
  var keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    var symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter(function (sym) {
        return Object.getOwnPropertyDescriptor(object, sym).enumerable;
      });
    }
    keys.push.apply(keys, symbols);
  }
  return keys;
}
/**
 * Babel object-spread helper: merge the own properties of each source
 * argument into `target`. Odd-positioned sources are copied key-by-key
 * (including enumerable symbols); even-positioned ones are copied via
 * property descriptors when supported.
 *
 * @param {Object} target destination object (mutated and returned)
 * @return {Object} target
 */
function _objectSpread(target) {
  for (var i = 1; i < arguments.length; i++) {
    // Treat null/undefined sources as empty objects.
    var source = arguments[i] != null ? arguments[i] : {};
    if (i % 2) {
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}
/**
 * Babel helper: assign `value` to `obj[key]`. Existing keys are redefined
 * with an explicit enumerable/configurable/writable descriptor; new keys
 * use plain assignment.
 *
 * @param {Object} obj destination object (mutated and returned)
 * @param {string|symbol} key property key
 * @param {*} value property value
 * @return {Object} obj
 */
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
var {
  GraphQLObjectType
} = require('graphql');
// Field groups contributed by each mutation module.
var tagMutations = require('./tags');
var deleteMutations = require('./delete');
var addMutations = require('./add');
var rehashMutations = require('./rehash');
var updateMutations = require('./update');
var versificationMutations = require('./versification');
// Merge all groups into one field map; later groups win on key collisions.
var schemaFields = _objectSpread(_objectSpread(_objectSpread(_objectSpread(_objectSpread(_objectSpread({}, tagMutations), deleteMutations), addMutations), rehashMutations), updateMutations), versificationMutations);
// Root Mutation type exposing every merged field.
var schemaMutations = new GraphQLObjectType({
  name: 'Mutation',
  description: 'Operations that change the state of Proskomma',
  fields: schemaFields
});
module.exports = {
  schemaMutations
};
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
class SqlPoolConnectionPoliciesOperations(object):
    """SqlPoolConnectionPoliciesOperations operations.
    You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: The API version to use for this operation. Constant value: "2019-06-01-preview".
    :ivar connection_policy_name: The name of the connection policy. Constant value: "default".
    """
    # NOTE: AutoRest-generated code — regenerate rather than hand-edit.
    models = models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        # Fixed constants for this API surface.
        self.api_version = "2019-06-01-preview"
        self.connection_policy_name = "default"
        self.config = config
    def get(
            self, resource_group_name, workspace_name, sql_pool_name, custom_headers=None, raw=False, **operation_config):
        """Get a Sql pool's connection policy, which is used with table auditing.
        Get a Sql pool's connection policy, which is used with table auditing.
        :param resource_group_name: The name of the resource group. The name
         is case insensitive.
        :type resource_group_name: str
        :param workspace_name: The name of the workspace
        :type workspace_name: str
        :param sql_pool_name: SQL pool name
        :type sql_pool_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: SqlPoolConnectionPolicy or ClientRawResponse if raw=true
        :rtype: ~azure.mgmt.synapse.models.SqlPoolConnectionPolicy or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorContractException<azure.mgmt.synapse.models.ErrorContractException>`
        """
        # Construct URL
        url = self.get.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
            'connectionPolicyName': self._serialize.url("self.connection_policy_name", self.connection_policy_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)
        # Only 200 is a documented success status for this operation.
        if response.status_code not in [200]:
            raise models.ErrorContractException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('SqlPoolConnectionPolicy', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/connectionPolicies/{connectionPolicyName}'}
|
import React, {Suspense} from 'react';
import logo from './logo.svg';
import './App.css';
// import User from "./User";
// Lazily load the User component so its bundle chunk is fetched on demand.
const User = React.lazy(() => import('./User'));
/**
 * Root application component: renders the CRA header with the logo served
 * from REACT_APP_CONTENT_HOST and the lazily loaded User component wrapped
 * in a Suspense fallback.
 */
function App() {
  return (
    <div className="App">
      <header className="App-header">
        <img src={`${process.env.REACT_APP_CONTENT_HOST}${logo}`} className="App-logo" alt="logo" />
        <p>
          Edit <code>src/App.js</code> and save to reload.
        </p>
        <a
          className="App-link"
          href="https://reactjs.org"
          target="_blank"
          rel="noopener noreferrer">
          {/* Shows the fallback text until the User chunk resolves. */}
          <Suspense fallback={'Loading...'}>
            <User />
          </Suspense>
        </a>
      </header>
    </div>
  );
}
export default App;
|
from flask import Flask, request, jsonify
from pyknow import Fact
from maximum_example import compute_max
from robot_example import TrafficLight, robot
app = Flask(__name__)
@app.route('/example/robot/', methods=['POST'])
def pyknow_example():
    """
    Receives a traffic light color and passes it to the robot engine.
    Returns:
        Engine response in json format
    """
    light = request.get_json().get('light', None)
    if light is None or light not in ['green', 'red', 'yellow', 'blinking_yellow']:
        # NOTE(review): returns a plain string with HTTP 200 for invalid
        # input — consider whether a 400 status is intended.
        return 'That is not a valid light color...'
    # Reset the engine before each request so facts do not accumulate.
    robot.reset()
    robot.declare(TrafficLight(color=light))
    robot.run()
    return jsonify({'robot_response': robot.response})
@app.route('/example/maximum/', methods=['POST'])
def maximum_example():
    """
    Receives a list of integers and passes it to the maximum machine.
    Returns:
        Engine response in json format.
    """
    compute_max.reset()
    # Deduplicate via set() before declaring one Fact per distinct integer.
    compute_max.declare(*[Fact(val=n) for n in set(
        [int(x) for x in request.get_json().get('find_max_of', [])]
    )
    ])
    compute_max.run()
    return jsonify({'maximum_response': compute_max.response})
if __name__ == '__main__':
    app.run(debug=True, port=5000)
|
from django.conf.urls import url
from . import views
from django.conf.urls.static import static
from django.conf import settings
# Route table for the neighborhood app.
# NOTE(review): patterns such as r'^join(?P<neighborhood_id>\d+)' have no
# trailing "/" or "$", so they also match longer paths — confirm intended.
urlpatterns=[
    url('^$',views.home_page,name = 'home_page'),
    url(r'^edit$', views.edit, name='edit_profile'),
    url(r'^upload/$', views.upload_business, name='upload_business'),
    url(r'^hood/$', views.add_hood, name='add_hood'),
    url(r'^join(?P<neighborhood_id>\d+)',views.join, name='join'),
    url(r'^leave/(?P<neighborhood_id>\d+)',views.leave, name='leave'),
    url(r'^one_hood(?P<neighborhood_id>\d+)',views.hood, name='hood'),
    url(r'^comment/(?P<post_id>\d+)', views.one_post, name='comment'),
    url(r'^post/$', views.add_post,name='add_post'),
    url(r'^search/', views.search_results, name='search_results'),
]
# Serve user-uploaded media through Django itself during development only.
if settings.DEBUG:
    urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
|
#
# This file is part of LiteSPI
#
# Copyright (c) 2020 Antmicro <www.antmicro.com>
# SPDX-License-Identifier: BSD-2-Clause
import unittest
from migen import *
from litespi.core.mmap import LiteSPIMMAP
from litespi.common import *
from litespi.opcodes import SpiNorFlashOpCodes as Codes
from litespi.spi_nor_flash_module import SpiNorFlashModule
from litespi.ids import SpiNorFlashManufacturerIDs
class TestSPIMMAP(unittest.TestCase):
    """Unit tests for LiteSPIMMAP, the memory-mapped SPI flash gateware core."""
    class DummyChip(SpiNorFlashModule):
        # Minimal fake flash-chip description used to parameterize the DUT.
        manufacturer_id = SpiNorFlashManufacturerIDs.NONJEDEC
        device_id = 0x0204
        name = "dummychip1"
        total_size = 2097152 # bytes
        page_size = 256 # bytes
        total_pages = 8192
        supported_opcodes = [
            Codes.READ_1_1_1,
            Codes.PP_1_1_1
        ]
        dummy_bits = 8
    def test_spi_mmap_core_syntax(self):
        """Elaboration-only smoke test: building the core must not raise."""
        spi_mmap = LiteSPIMMAP(flash=self.DummyChip(Codes.READ_1_1_1, []))
    def test_spi_mmap_read_test(self):
        """Simulate one Wishbone read and verify the PHY-side command stream."""
        opcode = Codes.READ_1_1_1
        dut = LiteSPIMMAP(flash=self.DummyChip(opcode, []))
        def wb_gen(dut, addr, data):
            # Wishbone master side: issue a single read at `addr` and flag
            # success when the returned word equals `data`.
            dut.data_ok = 0
            yield dut.bus.adr.eq(addr)
            yield dut.bus.we.eq(0)
            yield dut.bus.cyc.eq(1)
            yield dut.bus.stb.eq(1)
            while (yield dut.bus.ack) == 0:
                yield
            print((yield dut.bus.dat_r))
            if (yield dut.bus.dat_r) == data:
                dut.data_ok = 1
        def phy_gen(dut, addr, data):
            # Fake PHY side: check the opcode, address and dummy mask the core
            # emits on `source`, then feed `data` back through `sink`.
            dut.addr_ok = 0
            dut.opcode_ok = 0
            dut.cmd_ok = 0
            dut.no_dummy = 0
            yield dut.sink.valid.eq(0)
            yield dut.source.ready.eq(1)
            while (yield dut.source.valid) == 0:
                yield
            # READ CMD
            if (yield dut.source.data) == opcode.code: # cmd ok
                dut.opcode_ok = 1
            yield
            yield dut.sink.valid.eq(1)
            while (yield dut.source.valid) == 0:
                yield
            yield dut.sink.valid.eq(0)
            # READ ADDR
            if (yield dut.source.data) == (addr<<2): # address cmd
                dut.addr_ok = 1
            yield
            yield dut.sink.valid.eq(1)
            while (yield dut.source.valid) == 0:
                yield
            yield dut.sink.valid.eq(0)
            # NO DUMMY, mask should be 0 because we attempt to read data
            if (yield dut.source.mask) == 0:
                dut.no_dummy = 1
            # SEND DATA
            yield dut.source.ready.eq(0)
            yield
            yield dut.sink.data.eq(data)
            yield dut.sink.valid.eq(1)
            while (yield dut.sink.ready) == 0:
                yield
            yield
            yield dut.sink.valid.eq(0)
            yield
        addr = 0xcafe
        data = 0xdeadbeef
        run_simulation(dut, [wb_gen(dut, addr, data), phy_gen(dut, addr, data)])
        self.assertEqual(dut.data_ok, 1)
        self.assertEqual(dut.addr_ok, 1)
        self.assertEqual(dut.opcode_ok, 1)
        self.assertEqual(dut.no_dummy, 1)
|
#! /usr/bin/env python
# File: curly.py
# Author: Mae Morella
#
# A very simple HTTPS client, using the Python requests module
# Extends the code in requests_client.py
import sys
import requests
import argparse
import logging
import pprint  # NOTE(review): unused in this script — confirm before removing
# Take URL and file input by parsing command-line args
parser = argparse.ArgumentParser(
    description='Retrieves a single webpage, and writes it to a file.')
parser.add_argument('-v', '--verbose', action='store_true',
                    help="Prints connection headers")
parser.add_argument('url', metavar="URL", type=str,
                    help='A URL to send a request to')
# Optional output file, opened in binary mode so the raw body can be written.
# Defaults to stdout, in which case the decoded text is printed instead.
parser.add_argument('file', nargs="?", help='Output file to write to.',
                    type=argparse.FileType('wb'), default=sys.stdout)
args = parser.parse_args()
url = args.url
outfile = args.file
# Verbose mode lowers the log threshold to DEBUG so headers/timing are shown.
logging.basicConfig(format="",
                    level=(logging.DEBUG if args.verbose else logging.INFO))
try:
    # Send HTTP GET request
    res = requests.get(url)
    # Print debug info
    elapsed_ms = int(res.elapsed.total_seconds() * 1000)
    logging.debug(
        f"Got response status {res.status_code} in {elapsed_ms} ms")
    logging.debug(res.headers)
    # for stdout, decode response
    if (outfile is sys.stdout):
        print(res.text)
    else:
        logging.info(
            f"Writing response data to {outfile.name} ({len(res.content)} bytes)")
        outfile.write(res.content)
    sys.exit(0)
except Exception as e:
    # Any failure (bad URL, DNS error, connection refused, ...) exits non-zero.
    print("ERR:", e, file=sys.stderr)
    sys.exit(1)
|
// JSONP-style payload: invokes the page-supplied deepmacDetailCallback with
// OUI registration history for MAC prefix 5c857e200000/28.
deepmacDetailCallback("5c857e200000/28",[{"d":"2020-05-20","t":"add","s":"ieee-mam.csv","a":"Limited Flat/RM 705A, 7/F, New East Ocean Centre No. 9 Science Museum Road Kowloon Hong Kong CN 000000","c":"CN","o":"mobilogix HongKong"}]);
|
import asyncio
import pytest
from click.testing import CliRunner
pytest.importorskip("requests")
import os
import socket
from multiprocessing import cpu_count
from time import sleep
import requests
import distributed.cli.dask_worker
from distributed import Client, Scheduler
from distributed.compatibility import LINUX
from distributed.deploy.utils import nprocesses_nthreads
from distributed.metrics import time
from distributed.utils import parse_ports, sync, tmpfile
from distributed.utils_test import gen_cluster, popen, terminate_process, wait_for_port
def test_nanny_worker_ports(loop):
    # Explicit --worker-port/--nanny-port must be honored and the nanny's
    # address reported alongside the worker in the scheduler's identity.
    with popen(["dask-scheduler", "--port", "9359", "--no-dashboard"]):
        with popen(
            [
                "dask-worker",
                "127.0.0.1:9359",
                "--host",
                "127.0.0.1",
                "--worker-port",
                "9684",
                "--nanny-port",
                "5273",
                "--no-dashboard",
            ]
        ):
            with Client("127.0.0.1:9359", loop=loop) as c:
                start = time()
                while True:
                    d = sync(c.loop, c.scheduler.identity)
                    if d["workers"]:
                        break
                    else:
                        # Give the worker up to 60s to register.
                        assert time() - start < 60
                        sleep(0.1)
                assert (
                    d["workers"]["tcp://127.0.0.1:9684"]["nanny"]
                    == "tcp://127.0.0.1:5273"
                )
@pytest.mark.slow
def test_nanny_worker_port_range(loop):
    # Port *ranges* ("start:stop") for workers and nannies: each of the three
    # processes must bind a port drawn from its respective range.
    with popen(["dask-scheduler", "--port", "9359", "--no-dashboard"]) as sched:
        nprocs = 3
        worker_port = "9684:9686"
        nanny_port = "9688:9690"
        with popen(
            [
                "dask-worker",
                "127.0.0.1:9359",
                "--nprocs",
                f"{nprocs}",
                "--host",
                "127.0.0.1",
                "--worker-port",
                worker_port,
                "--nanny-port",
                nanny_port,
                "--no-dashboard",
            ]
        ):
            with Client("127.0.0.1:9359", loop=loop) as c:
                start = time()
                while len(c.scheduler_info()["workers"]) < nprocs:
                    sleep(0.1)
                    assert time() - start < 60
                def get_port(dask_worker):
                    return dask_worker.port
                expected_worker_ports = set(parse_ports(worker_port))
                worker_ports = c.run(get_port)
                assert set(worker_ports.values()) == expected_worker_ports
                expected_nanny_ports = set(parse_ports(nanny_port))
                nanny_ports = c.run(get_port, nanny=True)
                assert set(nanny_ports.values()) == expected_nanny_ports
def test_nanny_worker_port_range_too_many_workers_raises(loop):
    # A two-port range cannot host three workers: startup must fail loudly.
    with popen(["dask-scheduler", "--port", "9359", "--no-dashboard"]):
        with popen(
            [
                "dask-worker",
                "127.0.0.1:9359",
                "--nprocs",
                "3",
                "--host",
                "127.0.0.1",
                "--worker-port",
                "9684:9685",
                "--nanny-port",
                "9686:9687",
                "--no-dashboard",
            ]
        ) as worker:
            assert any(
                b"Could not start" in worker.stderr.readline() for _ in range(100)
            )
def test_memory_limit(loop):
    # Human-friendly memory strings ("2e3MB") are normalized to an int in bytes.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            [
                "dask-worker",
                "127.0.0.1:8786",
                "--memory-limit",
                "2e3MB",
                "--no-dashboard",
            ]
        ):
            with Client("127.0.0.1:8786", loop=loop) as c:
                while not c.nthreads():
                    sleep(0.1)
                info = c.scheduler_info()
                [d] = info["workers"].values()
                assert isinstance(d["memory_limit"], int)
                assert d["memory_limit"] == 2e9
def test_no_nanny(loop):
    # --no-nanny runs the worker without a supervisor; it must still register.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            ["dask-worker", "127.0.0.1:8786", "--no-nanny", "--no-dashboard"]
        ) as worker:
            assert any(b"Registered" in worker.stderr.readline() for i in range(15))
@pytest.mark.slow
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
def test_no_reconnect(nanny, loop):
    # With --no-reconnect the worker process must exit once the scheduler dies.
    with popen(["dask-scheduler", "--no-dashboard"]) as sched:
        wait_for_port(("127.0.0.1", 8786))
        with popen(
            [
                "dask-worker",
                "tcp://127.0.0.1:8786",
                "--no-reconnect",
                nanny,
                "--no-dashboard",
            ]
        ) as worker:
            sleep(2)
            terminate_process(sched)
            start = time()
            while worker.poll() is None:
                sleep(0.1)
                assert time() < start + 30
def test_resources(loop):
    # --resources accepts space- and comma-separated K=V pairs; all three must
    # appear in the scheduler's worker info.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            [
                "dask-worker",
                "tcp://127.0.0.1:8786",
                "--no-dashboard",
                "--resources",
                "A=1 B=2,C=3",
            ]
        ):
            with Client("127.0.0.1:8786", loop=loop) as c:
                while not c.scheduler_info()["workers"]:
                    sleep(0.1)
                info = c.scheduler_info()
                worker = list(info["workers"].values())[0]
                assert worker["resources"] == {"A": 1, "B": 2, "C": 3}
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
def test_local_directory(loop, nanny):
    # --local-directory controls where the worker's scratch space is created.
    with tmpfile() as fn:
        with popen(["dask-scheduler", "--no-dashboard"]):
            with popen(
                [
                    "dask-worker",
                    "127.0.0.1:8786",
                    nanny,
                    "--no-dashboard",
                    "--local-directory",
                    fn,
                ]
            ):
                with Client("127.0.0.1:8786", loop=loop, timeout=10) as c:
                    start = time()
                    while not c.scheduler_info()["workers"]:
                        sleep(0.1)
                        assert time() < start + 8
                    info = c.scheduler_info()
                    worker = list(info["workers"].values())[0]
                    assert worker["local_directory"].startswith(fn)
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
def test_scheduler_file(loop, nanny):
    # Workers can discover the scheduler via a shared --scheduler-file.
    with tmpfile() as fn:
        with popen(["dask-scheduler", "--no-dashboard", "--scheduler-file", fn]):
            with popen(
                ["dask-worker", "--scheduler-file", fn, nanny, "--no-dashboard"]
            ):
                with Client(scheduler_file=fn, loop=loop) as c:
                    start = time()
                    while not c.scheduler_info()["workers"]:
                        sleep(0.1)
                        assert time() < start + 10
def test_scheduler_address_env(loop, monkeypatch):
    # With no address argument, the DASK_SCHEDULER_ADDRESS env var is used.
    monkeypatch.setenv("DASK_SCHEDULER_ADDRESS", "tcp://127.0.0.1:8786")
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(["dask-worker", "--no-dashboard"]):
            with Client(os.environ["DASK_SCHEDULER_ADDRESS"], loop=loop) as c:
                start = time()
                while not c.scheduler_info()["workers"]:
                    sleep(0.1)
                    assert time() < start + 10
def test_nprocs_requires_nanny(loop):
    # Multiple worker processes need a nanny supervisor; --no-nanny must fail.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            ["dask-worker", "127.0.0.1:8786", "--nprocs=2", "--no-nanny"]
        ) as worker:
            assert any(
                b"Failed to launch worker" in worker.stderr.readline()
                for i in range(15)
            )
def test_nprocs_negative(loop):
    # --nprocs=-1 means "one worker process per CPU".
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(["dask-worker", "127.0.0.1:8786", "--nprocs=-1"]):
            with Client("tcp://127.0.0.1:8786", loop=loop) as c:
                c.wait_for_workers(cpu_count(), timeout="10 seconds")
def test_nprocs_auto(loop):
    # --nprocs=auto picks a process/thread split via nprocesses_nthreads().
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(["dask-worker", "127.0.0.1:8786", "--nprocs=auto"]):
            with Client("tcp://127.0.0.1:8786", loop=loop) as c:
                procs, _ = nprocesses_nthreads()
                c.wait_for_workers(procs, timeout="10 seconds")
def test_nprocs_expands_name(loop):
    # With --nprocs > 1 an explicit --name becomes a "<name>-<i>" prefix and
    # all four generated worker names must remain unique.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(["dask-worker", "127.0.0.1:8786", "--nprocs", "2", "--name", "0"]):
            with popen(["dask-worker", "127.0.0.1:8786", "--nprocs", "2"]):
                with Client("tcp://127.0.0.1:8786", loop=loop) as c:
                    start = time()
                    while len(c.scheduler_info()["workers"]) < 4:
                        sleep(0.2)
                        assert time() < start + 30
                    info = c.scheduler_info()
                    names = [d["name"] for d in info["workers"].values()]
                    foos = [n for n in names if n.startswith("0-")]
                    assert len(foos) == 2
                    assert len(set(names)) == 4
@pytest.mark.skipif(not LINUX, reason="Need 127.0.0.2 to mean localhost")
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
@pytest.mark.parametrize(
    "listen_address", ["tcp://0.0.0.0:39837", "tcp://127.0.0.2:39837"]
)
def test_contact_listen_address(loop, nanny, listen_address):
    # --contact-address (what the scheduler sees) may differ from
    # --listen-address (what the worker binds).
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            [
                "dask-worker",
                "127.0.0.1:8786",
                nanny,
                "--no-dashboard",
                "--contact-address",
                "tcp://127.0.0.2:39837",
                "--listen-address",
                listen_address,
            ]
        ):
            with Client("127.0.0.1:8786") as client:
                while not client.nthreads():
                    sleep(0.1)
                info = client.scheduler_info()
                assert "tcp://127.0.0.2:39837" in info["workers"]
                # roundtrip works
                assert client.submit(lambda x: x + 1, 10).result() == 11
                def func(dask_worker):
                    return dask_worker.listener.listen_address
                assert client.run(func) == {"tcp://127.0.0.2:39837": listen_address}
@pytest.mark.skipif(not socket.has_ipv6, reason="Needs IPv6 support to test")
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
@pytest.mark.parametrize("listen_address", ["tcp://:39838", "tcp://[::1]:39838"])
def test_listen_address_ipv6(loop, nanny, listen_address):
    # IPv6 listen addresses: check both "bind everything" and loopback-only.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            [
                "dask-worker",
                "127.0.0.1:8786",
                nanny,
                "--no-dashboard",
                "--listen-address",
                listen_address,
            ]
        ):
            # IPv4 used by default for name of global listener; IPv6 used by default when
            # listening only on IPv6.
            bind_all = "[::1]" not in listen_address
            expected_ip = "127.0.0.1" if bind_all else "[::1]"
            expected_name = f"tcp://{expected_ip}:39838"
            expected_listen = "tcp://0.0.0.0:39838" if bind_all else listen_address
            with Client("127.0.0.1:8786") as client:
                while not client.nthreads():
                    sleep(0.1)
                info = client.scheduler_info()
                assert expected_name in info["workers"]
                assert client.submit(lambda x: x + 1, 10).result() == 11
                def func(dask_worker):
                    return dask_worker.listener.listen_address
                assert client.run(func) == {expected_name: expected_listen}
@pytest.mark.skipif(not LINUX, reason="Need 127.0.0.2 to mean localhost")
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
@pytest.mark.parametrize("host", ["127.0.0.2", "0.0.0.0"])
def test_respect_host_listen_address(loop, nanny, host):
    # --host alone should determine the address the worker listens on.
    with popen(["dask-scheduler", "--no-dashboard"]):
        with popen(
            ["dask-worker", "127.0.0.1:8786", nanny, "--no-dashboard", "--host", host]
        ) as worker:
            with Client("127.0.0.1:8786") as client:
                while not client.nthreads():
                    sleep(0.1)
                client.scheduler_info()
                # roundtrip works
                assert client.submit(lambda x: x + 1, 10).result() == 11
                def func(dask_worker):
                    return dask_worker.listener.listen_address
                listen_addresses = client.run(func)
                assert all(host in v for v in listen_addresses.values())
def test_dashboard_non_standard_ports(loop):
    """The worker dashboard must be reachable on a non-default port, and via
    jupyter-server-proxy when that package is installed."""
    pytest.importorskip("bokeh")
    try:
        import jupyter_server_proxy  # noqa: F401
        proxy_exists = True
    except ImportError:
        proxy_exists = False
    with popen(["dask-scheduler", "--port", "3449"]):
        with popen(
            [
                "dask-worker",
                "tcp://127.0.0.1:3449",
                "--dashboard-address",
                ":4833",
                "--host",
                "127.0.0.1",
            ]
        ):
            with Client("127.0.0.1:3449", loop=loop) as c:
                c.wait_for_workers(1)
            response = requests.get("http://127.0.0.1:4833/status")
            assert response.ok
            redirect_resp = requests.get("http://127.0.0.1:4833/main")
            # BUG FIX: this was previously a bare `redirect_resp.ok` expression
            # that asserted nothing.
            assert redirect_resp.ok
            # TEST PROXYING WORKS
            if proxy_exists:
                url = "http://127.0.0.1:8787/proxy/4833/127.0.0.1/status"
                response = requests.get(url)
                assert response.ok
    # After the worker exits, the dashboard port must be closed again.
    with pytest.raises(Exception):
        requests.get("http://localhost:4833/status/")
def test_version_option():
    # `dask-worker --version` must exit cleanly.
    runner = CliRunner()
    result = runner.invoke(distributed.cli.dask_worker.main, ["--version"])
    assert result.exit_code == 0
@pytest.mark.slow
@pytest.mark.parametrize("no_nanny", [True, False])
def test_worker_timeout(no_nanny):
    # An unreachable scheduler plus --death-timeout must end in a failure exit.
    runner = CliRunner()
    args = ["192.168.1.100:7777", "--death-timeout=1"]
    if no_nanny:
        args.append("--no-nanny")
    result = runner.invoke(distributed.cli.dask_worker.main, args)
    assert result.exit_code != 0
def test_bokeh_deprecation():
    # The legacy --bokeh/--no-bokeh flags emit a deprecation warning pointing
    # at the --dashboard options.
    pytest.importorskip("bokeh")
    runner = CliRunner()
    with pytest.warns(UserWarning, match="dashboard"):
        try:
            runner.invoke(distributed.cli.dask_worker.main, ["--bokeh"])
        except ValueError:
            # didn't pass scheduler
            pass
    with pytest.warns(UserWarning, match="dashboard"):
        try:
            runner.invoke(distributed.cli.dask_worker.main, ["--no-bokeh"])
        except ValueError:
            # didn't pass scheduler
            pass
@gen_cluster(nthreads=[])
async def test_integer_names(s):
    # A purely numeric --name is coerced to int before reaching the scheduler.
    with popen(["dask-worker", s.address, "--name", "123"]):
        while not s.workers:
            await asyncio.sleep(0.01)
        [ws] = s.workers.values()
        assert ws.name == 123
@pytest.mark.asyncio
@pytest.mark.parametrize("nanny", ["--nanny", "--no-nanny"])
async def test_worker_class(cleanup, tmp_path, nanny):
    # A custom Worker subclass can be selected via --worker-class, provided the
    # module is importable by the spawned process (here via PYTHONPATH).
    # Create module with custom worker class
    WORKER_CLASS_TEXT = """
from distributed.worker import Worker
class MyWorker(Worker):
    pass
"""
    tmpdir = str(tmp_path)
    tmpfile = str(tmp_path / "myworker.py")  # NOTE(review): shadows the imported tmpfile helper
    with open(tmpfile, "w") as f:
        f.write(WORKER_CLASS_TEXT)
    # Put module on PYTHONPATH
    env = os.environ.copy()
    if "PYTHONPATH" in env:
        env["PYTHONPATH"] = tmpdir + ":" + env["PYTHONPATH"]
    else:
        env["PYTHONPATH"] = tmpdir
    async with Scheduler(dashboard_address=":0") as s:
        async with Client(s.address, asynchronous=True) as c:
            with popen(
                [
                    "dask-worker",
                    s.address,
                    nanny,
                    "--worker-class",
                    "myworker.MyWorker",
                ],
                env=env,
            ):
                await c.wait_for_workers(1)
                # Every spawned worker must be an instance of the custom class.
                def worker_type(dask_worker):
                    return type(dask_worker).__name__
                worker_types = await c.run(worker_type)
                assert all(name == "MyWorker" for name in worker_types.values())
|
// Entry point: defer all work to the build script in ./lib.
require('./lib/build');
|
import flask
from flask_coralillo import Coralillo
import unittest
class InitTestCase(unittest.TestCase):
    """Tests for the supported Flask-Coralillo initialization patterns."""
    def setUp(self):
        # Fresh Flask app per test; no Coralillo bound to it yet.
        self.app = flask.Flask(__name__)
    def test_constructor(self):
        """Test that a constructor with app instance will initialize the
        connection"""
        coralillo = Coralillo(self.app)
        assert hasattr(coralillo, 'lua')
        assert hasattr(coralillo, 'redis')
    def test_init_app(self):
        """Test that a constructor without app instance will not initialize the
        connection.
        After Coralillo.init_app(app) is called, the connection will be
        initialized."""
        coralillo = Coralillo()
        # Deferred init: no engine until init_app() is called.
        assert coralillo._engine is None
        coralillo.init_app(self.app)
        assert coralillo._engine is not None
        assert coralillo.redis is not None
        assert coralillo.lua is not None
    def test_custom_prefix(self):
        """Test that config prefixes enable distinct connections"""
        # Two prefixes -> two independent Redis databases on the same app.
        self.app.config['DBA_URL'] = 'redis://localhost:6379/1'
        self.app.config['DBB_URL'] = 'redis://localhost:6379/2'
        coralillo_a = Coralillo(self.app, config_prefix='DBA')
        coralillo_b = Coralillo(self.app, config_prefix='DBB')
        assert coralillo_a.redis.connection_pool.connection_kwargs['db'] == 1
        assert coralillo_b.redis.connection_pool.connection_kwargs['db'] == 2
    def test_custom_id_function(self):
        # A user-supplied id_function must replace the default 32-char ids.
        def simple_ids():
            from random import choice
            return ''.join(choice('1234567890') for c in range(10))
        eng = Coralillo(self.app)
        custom_eng = Coralillo(self.app, id_function=simple_ids)
        old_id = eng.id_function()
        new_id = custom_eng.id_function()
        self.assertEqual(len(old_id), 32)
        self.assertEqual(len(new_id), 10)
if __name__ == '__main__':
    unittest.main()
|
var Parse = require('parse').Parse;
var ParseReact = require('parse-react');
var appConstants = require('../constants/appConstants');
var sessionUtils = require('./sessionUtils');
var githubUtils = require('./githubUtils');
var https = require('https');
var parseUtils = {
    // function for server side code
    // to call to check if a user exists
    //
    // NOTE(review): request-scoped values (githubData, serverResponse,
    // accessToken) are stored on this shared module object, so concurrent
    // logins could clobber each other — confirm single-request usage.
    serverLogin: function(githubData, serverResponse, accessToken) {
        var _this = this;
        // Initialize Parse for Server Side queries
        Parse.initialize(appConstants.PARSE_APP_ID, appConstants.PARSE_JS_KEY, appConstants.PARSE_MASTER_KEY);
        // Enables us to login the user from the
        // server side (remove if we find another way
        // to get the session token)
        Parse.User.enableUnsafeCurrentUser();
        // store github data
        this.githubData = githubData;
        this.serverResponse = serverResponse;
        this.sessionUtils = sessionUtils;
        this.accessToken = accessToken;
        // Log in via Parse's REST API using the GitHub login as the username
        // and the stringified GitHub id as the password.
        var path = '/1/login?username=';
        path += encodeURIComponent(githubData.login);
        path += '&password=';
        path += encodeURIComponent(String(githubData.id));
        https.get({
            host: 'api.parse.com',
            path: path,
            headers: {
                "X-Parse-Application-Id":appConstants.PARSE_APP_ID,
                "X-Parse-REST-API-Key":appConstants.REST_API_KEY,
            }
        }, function(response) {
            // Continuously update stream with data
            var body = '';
            response.on('data', function(d) {
                body += d;
            });
            response.on('end', function() {
                // Data reception is done, do whatever with it!
                var jsonResponse = JSON.parse(body);
                if(jsonResponse.error){
                    process.stdout.write("Error reading from parse API: ");
                    process.stdout.write(jsonResponse.error);
                }
                // A session token means the user already exists — start a
                // session; otherwise fall through to sign-up.
                if(jsonResponse.sessionToken) {
                    this.sessionUtils.createNewSession(jsonResponse.sessionToken, this.serverResponse);
                } else {
                    this.serverSignUp(this.accessToken);
                }
            }.bind(this));
        }.bind(this));
    },
    // Create a new Parse user from the GitHub profile stored by serverLogin.
    serverSignUp: function(accessToken) {
        var user = new Parse.User();
        var data = this.githubData;
        user.set("username", data.login);
        user.set("password", String(data.id));
        // Optional profile fields — only set when GitHub provided them.
        if(data.email && data.email.length) {
            user.set("email", data.email);
        };
        if(data.avatar_url && data.avatar_url.length) {
            user.set("avatar_url", data.avatar_url);
        }
        if(data.name && data.name.length) {
            user.set("name", data.name);
        }
        if(data.location && data.location.length) {
            user.set("location", data.location);
        }
        if(accessToken && accessToken.length) {
            user.set("access_token", accessToken);
        }
        process.stdout.write("RETURN DATA: \n");
        process.stdout.write(JSON.stringify(data));
        user.signUp(null, {
            success: function(user) {
                // Hooray! User signed up
                sessionUtils.createNewSession(user._sessionToken, this.serverResponse);
                process.stdout.write("USER DATA: ");
                process.stdout.write(JSON.stringify(user));
                process.stdout.write("\n");
            }.bind(this),
            error: function(user, error) {
                console.log("Error: " + error.code + " " + error.message);
                process.stdout.write("ERROR SIGNING UP: ");
                process.stdout.write("Error signing up user: ");
                process.stdout.write(error.code + " " + error.message);
            }
        });
    },
    // Exchange a session token for the current Parse user on the client, and
    // backfill a missing email address from GitHub.
    clientLogin: function(sessionToken) {
        Parse.User.become(sessionToken).then(function(user) {
            ParseReact.currentUser.update();
            if(user && user.attributes && !user.attributes.email) {
                githubUtils.getEmail(user, this.updateUser);
            }
        }.bind(this));
    },
    // Apply a partial update (`data`) to the given user's _User object.
    updateUser: function(user, data) {
        var parseObj = {
            className: '_User',
            objectId: user.id
        };
        ParseReact.Mutation.Set(parseObj, data).dispatch();
    },
    // Terminate the current Parse session.
    logout: function() {
        Parse.User.logOut();
    }
};
module.exports = parseUtils;
|
// Execute the callback for each property in source then return a new object with the same properties as source.
export const map = (source, callback) =>
  Object.fromEntries(
    Object.entries(source).map(([prop, value]) => [prop, callback(value, prop)])
  );
// For each property in source, execute the predicate (expects a boolean return value). If true, the property will be copied into the resulting object.
export const filter = (source, predicate) => Object.keys(source).reduce((reducer, prop) => {
  // Pass the key as a second argument for parity with `map`'s callback
  // signature; existing value-only predicates are unaffected.
  if (predicate(source[prop], prop)) {
    reducer[prop] = source[prop];
  }
  return reducer;
}, {});
// For each property in source, execute the callback. The reducer is shared across all callback invocations. It's up to the callback to remember to return the reducer though!
// BUG FIX: the accumulator was never handed to the callback, so it could not
// actually be "shared across invocations" as documented; it is now passed as
// the third argument (callbacks that ignore it behave exactly as before).
export const each = (source, callback, reducer) =>
  Object.keys(source).reduce((acc, prop) => callback(source[prop], prop, acc), reducer);
// Partial currying: wraps around the source and allows you to execute any of the objectHelper methods on it.
const objectHelper = (source) => ({
  map: (callback) => map(source, callback),
  filter: (callback) => filter(source, callback),
  each: (callback, reducer) => each(source, callback, reducer),
});

export default objectHelper;
|
/**
* Please refer to the following files in the root directory:
*
* README.md For information about the package.
* LICENSE For license details, copyrights and restrictions.
*/
import { BaseCalc, WtCalculatorError } from './baseCalc';
/**
 * O'Conner 1RM calculator.
 */
export default class OConnerCalc extends BaseCalc
{
    /**
     * Constructor.
     *
     * @param {object} opts Data options.
     * @param {object} defs Calc definitions.
     *
     * @return {BaseCalc}
     */
    constructor(opts, defs = null)
    {
        super(opts, defs);
    }

    /**
     * Work out the O'Conner rep multiplier.
     *
     * @return {float}
     *
     * @throws {WtCalculatorError} When the 'reps' option is missing.
     */
    calcMult()
    {
        if (!this.checkOpts(['reps'])) {
            throw new WtCalculatorError(`OConner calculation does not have required options (multi).`);
        }
        const reps = this.opts.reps;
        // A single rep needs no adjustment.
        if (reps === 1) {
            return 1;
        }
        // O'Conner formula: multiplier = 1 + reps / 40.
        return 1 + reps / 40;
    }

    /**
     * Run the 1RM calculation.
     *
     * @return {object} {val: estimated 1RM, _mult: multiplier used}
     *
     * @throws {WtCalculatorError} When the 'liftedWeight' option is missing.
     */
    calc()
    {
        if (!this.checkOpts(['liftedWeight'])) {
            throw new WtCalculatorError(`OConner calculation does not have required options.`);
        }
        const multiplier = this.calcMult();
        return {val: this.opts.liftedWeight * multiplier, _mult: multiplier};
    }
}
|
from __future__ import absolute_import
from __future__ import division
import torch
from torch import nn
from torch.nn import functional as F
import torchvision
__all__ = ['ResNet50', 'ResNet101', 'ResNet50M']
class ResNet50(nn.Module):
    """ResNet50 backbone + linear classifier for re-identification.

    Args:
        num_classes (int): number of identity classes for the classifier head.
        loss (set): losses the forward pass must serve; supported values are
            {'xent'} and {'xent', 'htri'}.
    """
    def __init__(self, num_classes, loss={'xent'}, **kwargs):
        super(ResNet50, self).__init__()
        self.loss = loss
        # CONSISTENCY FIX: load ImageNet-pretrained weights like ResNet101 and
        # ResNet50M below (previously this backbone was randomly initialized).
        resnet50 = torchvision.models.resnet50(pretrained=True)
        # Keep the convolutional trunk; drop the final avgpool + fc layers.
        self.base = nn.Sequential(*list(resnet50.children())[:-2])
        self.classifier = nn.Linear(2048, num_classes)
        self.feat_dim = 2048  # feature dimension

    def forward(self, x):
        x = self.base(x)
        # Global average pooling over the remaining spatial dimensions.
        x = F.avg_pool2d(x, x.size()[2:])
        f = x.view(x.size(0), -1)
        if not self.training:
            # Evaluation: return the embedding only.
            return f
        y = self.classifier(f)
        if self.loss == {'xent'}:
            return y
        elif self.loss == {'xent', 'htri'}:
            return y, f
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
class ResNet101(nn.Module):
    """ResNet101 backbone + linear classifier (2048-d global features)."""

    def __init__(self, num_classes, loss={'xent'}, **kwargs):
        super(ResNet101, self).__init__()
        self.loss = loss
        backbone = torchvision.models.resnet101(pretrained=True)
        # Everything except the final avgpool + fc layers.
        self.base = nn.Sequential(*list(backbone.children())[:-2])
        self.classifier = nn.Linear(2048, num_classes)
        self.feat_dim = 2048  # feature dimension

    def forward(self, x):
        feature_map = self.base(x)
        # Global average pooling, then flatten to (batch, 2048).
        pooled = F.avg_pool2d(feature_map, feature_map.size()[2:])
        embedding = pooled.view(pooled.size(0), -1)
        if not self.training:
            return embedding
        logits = self.classifier(embedding)
        if self.loss == {'xent'}:
            return logits
        if self.loss == {'xent', 'htri'}:
            return logits, embedding
        raise KeyError("Unsupported loss: {}".format(self.loss))
class ResNet50M(nn.Module):
    """ResNet50 + mid-level features.
    Reference:
    Yu et al. The Devil is in the Middle: Exploiting Mid-level Representations for
    Cross-Domain Instance Matching. arXiv:1711.08106.
    """
    def __init__(self, num_classes=0, loss={'xent'}, **kwargs):
        super(ResNet50M, self).__init__()
        self.loss = loss
        resnet50 = torchvision.models.resnet50(pretrained=True)
        base = nn.Sequential(*list(resnet50.children())[:-2])
        # Split the trunk so intermediate activations can be tapped.
        # NOTE(review): the base[i] indices assume torchvision's resnet50 child
        # ordering — confirm if the torchvision version changes.
        self.layers1 = nn.Sequential(base[0], base[1], base[2])
        self.layers2 = nn.Sequential(base[3], base[4])
        self.layers3 = base[5]
        self.layers4 = base[6]
        # The three blocks of the final stage are kept separate so each
        # block's output can be pooled individually.
        self.layers5a = base[7][0]
        self.layers5b = base[7][1]
        self.layers5c = base[7][2]
        # Fuses the concatenated 5a+5b pooled features (2048+2048) to 1024 dims.
        self.fc_fuse = nn.Sequential(nn.Linear(4096, 1024), nn.BatchNorm1d(1024), nn.ReLU())
        # Classifier input: 2048 (5c) + 1024 (fused mid-level) = 3072 dims.
        self.classifier = nn.Linear(3072, num_classes)
        self.feat_dim = 3072 # feature dimension
    def forward(self, x):
        x1 = self.layers1(x)
        x2 = self.layers2(x1)
        x3 = self.layers3(x2)
        x4 = self.layers4(x3)
        x5a = self.layers5a(x4)
        x5b = self.layers5b(x5a)
        x5c = self.layers5c(x5b)
        # Global average pooling of each final-stage block output.
        x5a_feat = F.avg_pool2d(x5a, x5a.size()[2:]).view(x5a.size(0), x5a.size(1))
        x5b_feat = F.avg_pool2d(x5b, x5b.size()[2:]).view(x5b.size(0), x5b.size(1))
        x5c_feat = F.avg_pool2d(x5c, x5c.size()[2:]).view(x5c.size(0), x5c.size(1))
        midfeat = torch.cat((x5a_feat, x5b_feat), dim=1)
        midfeat = self.fc_fuse(midfeat)
        # Final descriptor: last-block features + fused mid-level features.
        combofeat = torch.cat((x5c_feat, midfeat), dim=1)
        if not self.training:
            # Inference: return the embedding for retrieval/matching.
            return combofeat
        prelogits = self.classifier(combofeat)
        if self.loss == {'xent'}:
            return prelogits
        elif self.loss == {'xent', 'htri'}:
            return prelogits, combofeat
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
|
import logging, copy
import mythril.laser.ethereum.util as helper
class TaintRecord:
    """
    TaintRecord contains tainting information for a specific (state, node)
    the information specifies the taint status before executing the operation belonging to the state
    """

    def __init__(self):
        """Create an empty taint record."""
        self.stack = []
        self.memory = {}
        self.storage = {}
        self.states = []

    def stack_tainted(self, index):
        """Return the taint flag of the stack element at ``index`` (None if out of range)."""
        return self.stack[index] if index < len(self.stack) else None

    def memory_tainted(self, index):
        """Return the taint flag of the memory cell at ``index`` (untainted by default)."""
        return self.memory.get(index, False)

    def storage_tainted(self, index):
        """Return the taint flag of the storage slot at ``index`` (untainted by default)."""
        return self.storage.get(index, False)

    def add_state(self, state):
        """Associate ``state`` with this taint record."""
        self.states.append(state)

    def clone(self):
        """Return a deep copy of the taint data with an empty state list."""
        duplicate = TaintRecord()
        duplicate.stack = copy.deepcopy(self.stack)
        duplicate.memory = copy.deepcopy(self.memory)
        duplicate.storage = copy.deepcopy(self.storage)
        return duplicate
class TaintResult:
    """Taint analysis result obtained after running the taint runner"""

    def __init__(self):
        self.records = []

    def check(self, state, stack_index):
        """
        Checks if stack variable is tainted, before executing the instruction
        :param state: state to check variable in
        :param stack_index: index of stack variable
        :return: tainted
        """
        record = self._try_get_record(state)
        return record.stack_tainted(stack_index) if record is not None else None

    def add_records(self, records):
        """ Adds records to this taint result """
        self.records.extend(records)

    def _try_get_record(self, state):
        """ Finds record belonging to the state """
        return next((r for r in self.records if state in r.states), None)
class TaintRunner:
"""
Taint runner, is able to run taint analysis on symbolic execution result
"""
@staticmethod
def execute(statespace, node, state, initial_stack=[]):
"""
Runs taint analysis on the statespace
:param statespace: symbolic statespace to run taint analysis on
:param node: taint introduction node
:param state: taint introduction state
:param stack_indexes: stack indexes to introduce taint
:return: TaintResult object containing analysis results
"""
result = TaintResult()
# Build initial current_node
init_record = TaintRecord()
init_record.stack = initial_stack
state_index = node.states.index(state)
# List of (Node, TaintRecord, index)
current_nodes = [(node, init_record, state_index)]
environment = node.states[0].environment
for node, record, index in current_nodes:
records = TaintRunner.execute_node(node, record, index)
result.add_records(records)
children = TaintRunner.children(node, statespace, environment)
for child in children:
current_nodes.append((child, records[-1], 0))
return result
@staticmethod
def children(node, statespace, environment):
direct_children = [statespace.nodes[edge.node_to] for edge in statespace.edges if edge.node_from == node.uid]
children = []
for child in direct_children:
if child.states[0].environment == environment:
children.append(child)
else:
children += TaintRunner.children(child, statespace, environment)
return children
@staticmethod
def execute_node(node, last_record, state_index=0):
"""
Runs taint analysis on a given node
:param node: node to analyse
:param last_record: last taint record to work from
:param state_index: state index to start from
:return: List of taint records linked to the states in this node
"""
records = [last_record]
for index in range(state_index, len(node.states)):
current_state = node.states[index]
records.append(TaintRunner.execute_state(records[-1], current_state))
return records[1:]
@staticmethod
def execute_state(record, state):
assert len(state.mstate.stack) == len(record.stack)
""" Runs taint analysis on a state """
record.add_state(state)
new_record = record.clone()
# Apply Change
op = state.get_current_instruction()['opcode']
if op in TaintRunner.stack_taint_table.keys():
mutator = TaintRunner.stack_taint_table[op]
TaintRunner.mutate_stack(new_record, mutator)
elif op.startswith("PUSH"):
TaintRunner.mutate_push(op, new_record)
elif op.startswith("DUP"):
TaintRunner.mutate_dup(op, new_record)
elif op.startswith("SWAP"):
TaintRunner.mutate_swap(op, new_record)
elif op is "MLOAD":
TaintRunner.mutate_mload(new_record, state.mstate.stack[-1])
elif op.startswith("MSTORE"):
TaintRunner.mutate_mstore(new_record, state.mstate.stack[-1])
elif op is "SLOAD":
TaintRunner.mutate_sload(new_record, state.mstate.stack[-1])
elif op is "SSTORE":
TaintRunner.mutate_sstore(new_record, state.mstate.stack[-1])
elif op.startswith("LOG"):
TaintRunner.mutate_log(new_record, op)
elif op in ('CALL', 'CALLCODE', 'DELEGATECALL', 'STATICCALL'):
TaintRunner.mutate_call(new_record, op)
else:
logging.debug("Unknown operation encountered: {}".format(op))
return new_record
@staticmethod
def mutate_stack(record, mutator):
pop, push = mutator
values = []
for i in range(pop):
values.append(record.stack.pop())
taint = any(values)
for i in range(push):
record.stack.append(taint)
@staticmethod
def mutate_push(op, record):
TaintRunner.mutate_stack(record, (0, 1))
@staticmethod
def mutate_dup(op, record):
depth = int(op[3:])
index = len(record.stack) - depth
record.stack.append(record.stack[index])
@staticmethod
def mutate_swap(op, record):
depth = int(op[4:])
l = len(record.stack) - 1
i = l - depth
record.stack[l], record.stack[i] = record.stack[i], record.stack[l]
@staticmethod
def mutate_mload(record, op0):
_ = record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
logging.debug("Can't MLOAD taint track symbolically")
record.stack.append(False)
return
record.stack.append(record.memory_tainted(index))
@staticmethod
def mutate_mstore(record, op0):
_, value_taint = record.stack.pop(), record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
logging.debug("Can't mstore taint track symbolically")
return
record.memory[index] = value_taint
@staticmethod
def mutate_sload(record, op0):
_ = record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
logging.debug("Can't MLOAD taint track symbolically")
record.stack.append(False)
return
record.stack.append(record.storage_tainted(index))
@staticmethod
def mutate_sstore(record, op0):
_, value_taint = record.stack.pop(), record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
logging.debug("Can't mstore taint track symbolically")
return
record.storage[index] = value_taint
@staticmethod
def mutate_log(record, op):
depth = int(op[3:])
for _ in range(depth + 2):
record.stack.pop()
@staticmethod
def mutate_call(record, op):
pops = 6
if op in ('CALL', 'CALLCODE'):
pops += 1
for _ in range(pops):
record.stack.pop()
record.stack.append(False)
stack_taint_table = {
# instruction: (taint source, taint target)
'POP': (1, 0),
'ADD': (2, 1),
'MUL': (2, 1),
'SUB': (2, 1),
'AND': (2, 1),
'OR': (2, 1),
'XOR': (2, 1),
'NOT': (1, 1),
'BYTE': (2, 1),
'DIV': (2, 1),
'MOD': (2, 1),
'SDIV': (2, 1),
'SMOD': (2, 1),
'ADDMOD': (3, 1),
'MULMOD': (3, 1),
'EXP': (2, 1),
'SIGNEXTEND': (2, 1),
'LT': (2, 1),
'GT': (2, 1),
'SLT': (2, 1),
'SGT': (2, 1),
'EQ': (2, 1),
'ISZERO': (1, 1),
'CALLVALUE': (0, 1),
'CALLDATALOAD': (1, 1),
'CALLDATACOPY': (3, 0), #todo
'CALLDATASIZE': (0, 1),
'ADDRESS': (0, 1),
'BALANCE': (1, 1),
'ORIGIN': (0, 1),
'CALLER': (0, 1),
'CODESIZE': (0, 1),
'SHA3': (2, 1),
'GASPRICE': (0, 1),
'CODECOPY': (3, 0),
'EXTCODESIZE': (1, 1),
'EXTCODECOPY': (4, 0),
'RETURNDATASIZE': (0, 1),
'BLOCKHASH': (1, 1),
'COINBASE': (0, 1),
'TIMESTAMP': (0, 1),
'NUMBER': (0, 1),
'DIFFICULTY': (0, 1),
'GASLIMIT': (0, 1),
'JUMP': (1, 0),
'JUMPI': (2, 0),
'PC': (0, 1),
'MSIZE': (0, 1),
'GAS': (0, 1),
'CREATE': (3, 1),
'RETURN': (2, 0)
}
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE124_Buffer_Underwrite__char_alloca_cpy_68a.c
Label Definition File: CWE124_Buffer_Underwrite.stack.label.xml
Template File: sources-sink-68a.tmpl.c
*/
/*
* @description
* CWE: 124 Buffer Underwrite
* BadSource: Set data pointer to before the allocated memory buffer
* GoodSource: Set data pointer to the allocated memory buffer
* Sink: cpy
* BadSink : Copy string to data using strcpy
* Flow Variant: 68 Data flow: data passed as a global variable from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <wchar.h>
char * CWE124_Buffer_Underwrite__char_alloca_cpy_68_badData;
char * CWE124_Buffer_Underwrite__char_alloca_cpy_68_goodG2BData;
#ifndef OMITBAD
/* bad function declaration */
void CWE124_Buffer_Underwrite__char_alloca_cpy_68b_badSink();
/* Bad variant (CWE-124): the data pointer is deliberately set 8 bytes BEFORE
 * the alloca'd buffer and handed to the sink in file 68b via a global.
 * Do NOT "fix" this - the flaw is the purpose of the testcase. */
void CWE124_Buffer_Underwrite__char_alloca_cpy_68_bad()
{
    char * data;
    char * dataBuffer = (char *)ALLOCA(100*sizeof(char));
    memset(dataBuffer, 'A', 100-1);
    dataBuffer[100-1] = '\0';
    /* FLAW: Set data pointer to before the allocated memory buffer */
    data = dataBuffer - 8;
    CWE124_Buffer_Underwrite__char_alloca_cpy_68_badData = data;
    CWE124_Buffer_Underwrite__char_alloca_cpy_68b_badSink();
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* good function declarations */
void CWE124_Buffer_Underwrite__char_alloca_cpy_68b_goodG2BSink();
/* goodG2B uses the GoodSource with the BadSink */
/* Good source: the pointer stays at the start of the buffer, so the strcpy
 * sink in file 68b cannot underwrite. */
static void goodG2B()
{
    char * data;
    char * dataBuffer = (char *)ALLOCA(100*sizeof(char));
    memset(dataBuffer, 'A', 100-1);
    dataBuffer[100-1] = '\0';
    /* FIX: Set data pointer to the allocated memory buffer */
    data = dataBuffer;
    CWE124_Buffer_Underwrite__char_alloca_cpy_68_goodG2BData = data;
    CWE124_Buffer_Underwrite__char_alloca_cpy_68b_goodG2BSink();
}
/* Entry point for the good variants; dispatches to goodG2B(). */
void CWE124_Buffer_Underwrite__char_alloca_cpy_68_good()
{
    goodG2B();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
#ifdef INCLUDEMAIN
/* Standalone driver, compiled only with INCLUDEMAIN: runs the good variant(s)
 * first, then the bad one, bracketed by log lines for the harness. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE124_Buffer_Underwrite__char_alloca_cpy_68_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE124_Buffer_Underwrite__char_alloca_cpy_68_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
|
import shutil
import os
import logging
from packaging import version
import unittest
import numpy as np
import bilby
import scipy
from scipy.stats import ks_2samp, kstest
def ks_2samp_wrapper(data1, data2):
    """Run a two-sided Kolmogorov-Smirnov test across scipy versions.

    scipy >= 1.3.0 accepts the ``alternative``/``mode`` keywords; older
    versions only support the positional form.
    """
    if version.parse(scipy.__version__) < version.parse("1.3.0"):
        return ks_2samp(data1, data2)
    return ks_2samp(data1, data2, alternative="two-sided", mode="asymp")
class Test(unittest.TestCase):
    """Prior-sampling check for the 15-parameter binary black hole problem.

    The likelihood is replaced with ZeroLikelihood, so posterior samples must
    be draws from the prior; each parameter is compared to fresh prior samples
    with a KS test and the resulting p-values are checked for uniformity.
    """

    outdir = "outdir_for_tests"

    @classmethod
    def setUpClass(cls):
        # Idiom fix: classmethods receive the class; the first argument was
        # previously (confusingly) named `self`.
        if os.path.isdir(cls.outdir):
            try:
                shutil.rmtree(cls.outdir)
            except OSError:
                logging.warning("{} not removed prior to tests".format(cls.outdir))

    @classmethod
    def tearDownClass(cls):
        if os.path.isdir(cls.outdir):
            try:
                shutil.rmtree(cls.outdir)
            except OSError:
                logging.warning("{} not removed prior to tests".format(cls.outdir))

    def test_fifteen_dimensional_cbc(self):
        duration = 4.0
        sampling_frequency = 2048.0
        label = "full_15_parameters"
        np.random.seed(8817020)  # fixed seed for reproducible sampling
        waveform_arguments = dict(
            waveform_approximant="IMRPhenomPv2",
            reference_frequency=50.0,
            minimum_frequency=20.0,
        )
        waveform_generator = bilby.gw.WaveformGenerator(
            duration=duration,
            sampling_frequency=sampling_frequency,
            frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
            parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters,
            waveform_arguments=waveform_arguments,
        )
        ifos = bilby.gw.detector.InterferometerList(["H1", "L1"])
        ifos.set_strain_data_from_power_spectral_densities(
            sampling_frequency=sampling_frequency, duration=duration, start_time=0
        )
        # Sample in chirp mass / mass ratio instead of component masses.
        priors = bilby.gw.prior.BBHPriorDict()
        priors.pop("mass_1")
        priors.pop("mass_2")
        priors["chirp_mass"] = bilby.prior.Uniform(
            name="chirp_mass",
            latex_label="$M$",
            minimum=10.0,
            maximum=100.0,
            unit="$M_{\\odot}$",
        )
        priors["mass_ratio"] = bilby.prior.Uniform(
            name="mass_ratio", latex_label="$q$", minimum=0.5, maximum=1.0
        )
        priors["geocent_time"] = bilby.core.prior.Uniform(minimum=-0.1, maximum=0.1)
        likelihood = bilby.gw.GravitationalWaveTransient(
            interferometers=ifos,
            waveform_generator=waveform_generator,
            priors=priors,
            distance_marginalization=False,
            phase_marginalization=False,
            time_marginalization=False,
        )
        # Zero likelihood => the sampler should reproduce the prior exactly.
        likelihood = bilby.core.likelihood.ZeroLikelihood(likelihood)
        result = bilby.run_sampler(
            likelihood=likelihood,
            priors=priors,
            sampler="dynesty",
            npoints=1000,
            walks=100,
            outdir=self.outdir,
            label=label,
        )
        pvalues = [
            ks_2samp_wrapper(
                result.priors[key].sample(10000), result.posterior[key].values
            ).pvalue
            for key in priors.keys()
        ]
        print("P values per parameter")
        for key, p in zip(priors.keys(), pvalues):
            print(key, p)
        # Under the null, per-parameter KS p-values are themselves uniform.
        self.assertGreater(kstest(pvalues, "uniform").pvalue, 0.01)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
"""
Work with subtractions in the database.
"""
import asyncio
import glob
import shutil
from typing import Any, Dict, List, Optional
from sqlalchemy.ext.asyncio import AsyncEngine
import virtool.utils
from virtool.config.cls import Config
from virtool.db.utils import get_new_id, get_one_field
from virtool.subtractions.utils import get_subtraction_files, join_subtraction_path
from virtool.types import App
PROJECTION = [
"_id",
"count",
"created_at",
"file",
"ready",
"job",
"name",
"nickname",
"user",
"has_file",
]
ADD_SUBTRACTION_FILES_QUERY = {"deleted": False}
async def attach_computed(app: App, subtraction: Dict[str, Any]) -> Dict[str, Any]:
    """
    Attach the ``linked_samples`` and ``files`` fields to the passed subtraction document.

    Queries PostgreSQL and MongoDB concurrently for the required data and
    returns a new document dictionary (the input is not mutated).

    :param app: the application object
    :param subtraction: the subtraction document to attach to
    :return: a new subtraction document with new fields attached
    """
    subtraction_id = subtraction["_id"]

    # Bug fix: ``virtool.subtractions.db`` is never imported in this module;
    # call the module-local ``get_linked_samples`` directly.
    files, linked_samples = await asyncio.gather(
        get_subtraction_files(app["pg"], subtraction_id),
        get_linked_samples(app["db"], subtraction_id),
    )

    return {**subtraction, "files": files, "linked_samples": linked_samples}
async def attach_subtractions(db, document: Dict[str, Any]) -> Dict[str, Any]:
    """
    Attach more subtraction detail to a document with a field `subtractions` that contains a list
    of subtraction IDs.

    :param db: the application database client
    :param document: the document to attach data to
    :return: the updated document
    """
    subtraction_ids = document.get("subtractions")

    if not subtraction_ids:
        return document

    subtractions = [
        {
            "id": subtraction_id,
            "name": await get_one_field(db.subtraction, "name", subtraction_id),
        }
        for subtraction_id in subtraction_ids
    ]

    return {**document, "subtractions": subtractions}
async def check_subtraction_fasta_files(db, config: Config) -> list:
    """
    Check subtraction directories for files and set 'has_file' to boolean based on whether .fa.gz
    exists.

    :param db: the application database client
    :param config: the application configuration
    :return: a list of subtraction IDs without FASTA files
    """
    missing = []

    async for subtraction in db.subtraction.find({"deleted": False}):
        subtraction_id = subtraction["_id"]
        path = join_subtraction_path(config, subtraction_id)

        has_file = bool(glob.glob(f"{path}/*.fa.gz"))

        if not has_file:
            missing.append(subtraction_id)

        await db.subtraction.find_one_and_update(
            {"_id": subtraction_id}, {"$set": {"has_file": has_file}}
        )

    return missing
async def create(
    db,
    user_id: str,
    filename: str,
    name: str,
    nickname: str,
    upload_id: int,
    subtraction_id: Optional[str] = None,
) -> dict:
    """
    Create a new subtraction document.

    :param db: the application database client
    :param user_id: the id of the current user
    :param filename: the name of the `subtraction_file`
    :param name: the name of the subtraction
    :param nickname: the nickname of the subtraction
    :param upload_id: the id of the `subtraction_file`
    :param subtraction_id: the id of the subtraction; generated when not given
    :return: the new document
    """
    document = {
        # Bug fix: ``virtool.db`` is never imported in this module, so
        # ``virtool.db.utils.get_new_id`` could fail with AttributeError;
        # use the directly imported ``get_new_id`` instead.
        "_id": subtraction_id or await get_new_id(db.subtraction),
        "name": name,
        "nickname": nickname,
        "deleted": False,
        "ready": False,
        "file": {"id": upload_id, "name": filename},
        "user": {"id": user_id},
        "created_at": virtool.utils.timestamp(),
    }

    await db.subtraction.insert_one(document)

    return document
async def delete(app: App, subtraction_id: str) -> int:
    """
    Soft-delete a subtraction and clean up after it.

    Marks the document as deleted, unlinks the subtraction from any samples
    that reference it, and removes its directory from disk when a document was
    actually modified.

    :param app: the application object
    :param subtraction_id: the id of the subtraction to delete
    :return: the number of documents modified (0 or 1)
    """
    db = app["db"]
    config = app["config"]
    update_result = await db.subtraction.update_one(
        {"_id": subtraction_id, "deleted": False}, {"$set": {"deleted": True}}
    )
    await unlink_default_subtractions(db, subtraction_id)
    if update_result.modified_count:
        path = join_subtraction_path(config, subtraction_id)
        # Blocking filesystem removal runs in a thread; errors are ignored
        # (shutil.rmtree's ignore_errors=True).
        await app["run_in_thread"](shutil.rmtree, path, True)
    return update_result.modified_count
async def finalize(
    db,
    pg: AsyncEngine,
    subtraction_id: str,
    gc: Dict[str, float],
    count: int,
) -> dict:
    """
    Finalize a subtraction by setting `ready` to True and updating the `gc` and `count` fields.

    :param db: the application database client
    :param pg: the PostgreSQL AsyncEngine object
    :param subtraction_id: the id of the subtraction
    :param gc: a dict contains gc data
    :param count: the sequence count to store on the document
    :return: the updated subtraction document
    """
    updated_document = await db.subtraction.find_one_and_update(
        {"_id": subtraction_id},
        {
            "$set": {
                "gc": gc,
                "ready": True,
                "count": count,
            }
        },
    )
    return updated_document
async def get_linked_samples(db, subtraction_id: str) -> List[dict]:
    """
    Find all samples containing given 'subtraction_id' in 'subtractions' field.

    :param db: the application database client
    :param subtraction_id: the ID of the subtraction
    :return: a list of dicts containing linked samples with 'id' and 'name' field.
    """
    linked = []

    async for document in db.samples.find(
        {"subtractions": subtraction_id}, ["_id", "name"]
    ):
        linked.append(virtool.utils.base_processor(document))

    return linked
async def unlink_default_subtractions(db, subtraction_id: str):
    """
    Remove ``subtraction_id`` from the ``subtractions`` list of every sample
    that references it.

    :param db: the application database client
    :param subtraction_id: the id of the subtraction being removed
    """
    await db.samples.update_many(
        {"subtractions": subtraction_id}, {"$pull": {"subtractions": subtraction_id}}
    )
|
"""
Scatter Plot with Minimap
-------------------------
This example shows how to create a miniature version of a plot
such that creating a selection in the miniature version
adjusts the axis limits in another, more detailed view.
"""
# category: scatter plots
import altair as alt
from vega_datasets import data
source = data.seattle_weather()
zoom = alt.selection_interval(encodings=["x", "y"])
minimap = (
alt.Chart(source)
.mark_point()
.add_selection(zoom)
.encode(
x="date:T",
y="temp_max:Q",
color=alt.condition(zoom, "weather", alt.value("lightgray")),
)
.properties(
width=200,
height=200,
title="Minimap -- click and drag to zoom in the detail view",
)
)
detail = (
alt.Chart(source)
.mark_point()
.encode(
x=alt.X(
"date:T",
scale=alt.Scale(domain={"selection": zoom.name, "encoding": "x"}),
),
y=alt.Y(
"temp_max:Q",
scale=alt.Scale(domain={"selection": zoom.name, "encoding": "y"}),
),
color="weather",
)
.properties(width=600, height=400, title="Seattle weather -- detail view")
)
detail | minimap
|
#pragma once
// Network class ids used to identify entity/weapon types.
// NOTE(review): the numeric values appear tied to a specific game build and
// may shift between updates - confirm against the current client before
// relying on them. Unvalued entries (e.g. KnifeGG) follow the previous value.
enum class ClassId {
    C4 = 34,
    Chicken = 36,
    CSPlayer = 40,
    Deagle = 46,
    Knife = 107,
    KnifeGG,
    PlantedC4 = 128,
    Aug = 231,
    Awp,
    Elite = 238,
    FiveSeven = 240,
    G3sg1,
    Glock = 244,
    P2000,
    P250 = 257,
    Scar20 = 260,
    Sg553 = 264,
    Ssg08 = 266,
    Tec9 = 268
};
|
import React from "react";
import { render } from "@testing-library/react";
import { ThemeContext } from "./context/contexts";
import UserContext, { ANONYMOUS_USER } from "./context/user/context";
// Minimal MutationObserver stub installed globally: all methods are inert
// jest spies so components that observe DOM mutations can render in tests.
function MutationObserver(callback) {
  this.observe = jest.fn();
  this.disconnect = jest.fn();
  this.takeRecords = jest.fn();
}
global.MutationObserver = jest.fn(MutationObserver);
// Spies injected into the context providers below; exposed through
// customRender's return value so tests can assert on context callbacks.
const mockSpies = {
  setDarkMode: jest.fn(),
  userDispatch: jest.fn(),
};
// Wraps the component under test in the app's theme and user providers,
// wired to mockSpies (light theme, anonymous user).
const AllTheProviders = ({ children }) => (
  <ThemeContext.Provider
    value={{ darkMode: false, setDarkMode: mockSpies.setDarkMode }}
  >
    <UserContext.Provider
      value={{
        userData: ANONYMOUS_USER,
        userDispatch: mockSpies.userDispatch,
      }}
    >
      {children}
    </UserContext.Provider>
  </ThemeContext.Provider>
);
/**
 * Renders `ui` wrapped in the app's theme and user context providers.
 * Returns the usual testing-library render result augmented with the shared
 * `mockSpies` object so tests can assert on context callbacks.
 */
const customRender = (ui, options) => ({
  ...render(ui, { wrapper: AllTheProviders, ...options }),
  mockSpies,
});
/**
 * Asserts that `linkArray` contains exactly `expected_length` anchors and
 * that every anchor's href is one of `expected_refs`.
 */
const testLinkArray = (linkArray, expected_length, expected_refs) => {
  expect(linkArray.length).toBe(expected_length);
  for (const link of linkArray) {
    expect(expected_refs).toContain(link.getAttribute("href"));
  }
};
export * from "@testing-library/react";
export { customRender as render, testLinkArray };
|
/**
* Renders a markdown string as html.
*/
import Remarkable from 'remarkable';
// Single module-level renderer instance, shared across all calls.
const md = constructRemarkableRenderer();

/**
 * Renders a markdown string to an HTML string using the shared renderer.
 * @param {string} string - markdown source
 * @returns {string} rendered HTML
 */
export default function renderMarkdown(string) {
  return md.render(string);
}
/**
 * Builds the shared Remarkable instance: autolinking enabled, plus the
 * YouTube-embedding extension.
 * @returns {Remarkable} configured renderer
 */
function constructRemarkableRenderer() {
  const renderer = new Remarkable({ linkify: true });
  renderer.use(inlineYoutebeVideos);
  return renderer;
}
/**
 * Remarkable extension that replaces YouTube links with an inline <iframe>.
 *
 * Bug fix: the original set `noClose` on the whole token ARRAY, so after the
 * first YouTube link every later link_close in the same render was also
 * suppressed, dropping `</a>` from ordinary links. The flag is now set on the
 * matching link_close token only.
 */
function inlineYoutebeVideos(md) {
  // I'm not sure if this is the right way to extend Remarkable. I'm just
  // checking if a link is a youtube link, and if it is, then I return my own
  // html. Otherwise the original remarkable methods are called.
  const rules = md.renderer.rules;
  const originalLinkOpen = rules.link_open;
  const originalLinkClose = rules.link_close;
  rules.link_open = function linkDecorator(tokens, idx, options) {
    const href = tokens[idx].href;
    const youtubeVideoId = isYouTubueLink(href);
    if (youtubeVideoId) {
      // Mark only this link's closing token for suppression.
      for (let i = idx + 1; i < tokens.length; i += 1) {
        if (tokens[i].type === 'link_close') {
          tokens[i].noClose = true;
          break;
        }
      }
      // TODO: this should be flexible width;
      return `<iframe width="560" height="315" src="https://www.youtube.com/embed/${youtubeVideoId}" frameborder="0" allowfullscreen></iframe>`;
    }
    return originalLinkOpen(tokens, idx, options);
  };
  rules.link_close = function linkCloseDecorator(tokens, idx, options) {
    if (tokens[idx].noClose) return '';
    return originalLinkClose(tokens, idx, options);
  };
}
/**
 * Extracts the 11-character YouTube video id from a URL.
 *
 * Bug fix: the dots in `youtu.be` are now escaped; previously `.` matched any
 * character, so strings like "youtuXbe/..." were treated as YouTube links.
 *
 * @param {string} href - the link URL to inspect
 * @returns {string|undefined} the video id, or undefined when `href` is not a
 *   recognizable YouTube video link
 */
function isYouTubueLink(href) {
  const youtubeRegex = /^.*((youtu\.be\/)|(v\/)|(\/u\/\w\/)|(embed\/)|(watch\?))\??v?=?([^#&?]*).*/;
  const youtubeMatch = href.match(youtubeRegex);
  if (youtubeMatch && youtubeMatch[7].length === 11) {
    return youtubeMatch[7];
  }
}
|
import { h } from 'vue'
// Auto-generated icon wrapper component ("toggle2-off", 16x16).
// The SVG path markup is injected via innerHTML; avoid editing by hand.
export default {
  name: "Toggle2Off",
  vendor: "B",
  type: "",
  tags: ["toggle2","off"],
  render() {
    return h(
      "svg",
      {"xmlns":"http://www.w3.org/2000/svg","width":"16","height":"16","fill":"currentColor","class":"v-icon","viewBox":"0 0 16 16","data-name":"b-toggle2-off","innerHTML":" <path d='M9 11c.628-.836 1-1.874 1-3a4.978 4.978 0 0 0-1-3h4a3 3 0 1 1 0 6H9z'/> <path d='M5 12a4 4 0 1 1 0-8 4 4 0 0 1 0 8zm0 1A5 5 0 1 0 5 3a5 5 0 0 0 0 10z'/>"},
    )
  }
}
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseAdmin
from django.utils.translation import gettext as _
from core import models
class UserAdmin(BaseAdmin):
    # Admin for the custom email-based User model: replaces the default
    # username-centric fieldsets with email/name equivalents.
    ordering = ['id']
    list_display = ['email', 'name']
    # Fieldsets shown on the change (edit) page.
    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        (_('Personal Info'), {'fields': ('name',)}),
        (
            _('Permissions'),
            {'fields': ('is_active', 'is_staff', 'is_superuser')}
        ),
        (_('Important dates'), {'fields': ('last_login',)})
    )
    # Fields shown on the "add user" page only (password entered twice).
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2')
        }),
    )
# Register the customized User admin and plain default admins for the rest.
admin.site.register(models.User, UserAdmin)
admin.site.register(models.Tag)
admin.site.register(models.Ingredient)
admin.site.register(models.Recipe)
|
/**
* SEO component that queries for data with
* Gatsby's useStaticQuery React hook
*
* See: https://www.gatsbyjs.org/docs/use-static-query/
*/
import { graphql, useStaticQuery } from 'gatsby'
import PropTypes from 'prop-types'
import React from 'react'
import Helmet from 'react-helmet'
// Builds the document head (html lang, title, description and social meta
// tags) from props, falling back to site-wide metadata from gatsby-config
// for the description. Extra meta entries can be appended via the `meta` prop.
function SEO({ description, lang, meta, title }) {
  const { site } = useStaticQuery(
    graphql`
      query {
        site {
          siteMetadata {
            title
            description
            author
          }
        }
      }
    `
  )
  // Prop wins over the site-wide default description.
  const metaDescription = description || site.siteMetadata.description
  return (
    <Helmet
      htmlAttributes={{
        lang,
      }}
      title={title}
      titleTemplate={`%s | ${site.siteMetadata.title}`}
      meta={[
        {
          name: `description`,
          content: metaDescription,
        },
        {
          property: `og:title`,
          content: title,
        },
        {
          property: `og:description`,
          content: metaDescription,
        },
        {
          property: `og:type`,
          content: `website`,
        },
        {
          name: `twitter:card`,
          content: `summary`,
        },
        {
          name: `twitter:creator`,
          content: site.siteMetadata.author,
        },
        {
          name: `twitter:title`,
          content: title,
        },
        {
          name: `twitter:description`,
          content: metaDescription,
        },
      ].concat(meta)}
    />
  )
}
// Defaults keep the component usable with only a `title` prop.
SEO.defaultProps = {
  lang: `en`,
  meta: [],
  description: ``,
}
SEO.propTypes = {
  description: PropTypes.string,
  lang: PropTypes.string,
  meta: PropTypes.arrayOf(PropTypes.object),
  title: PropTypes.string.isRequired,
}
export default SEO
|
/*
* ESPRESSIF MIT License
*
* Copyright (c) 2021 <ESPRESSIF SYSTEMS (SHANGHAI) CO., LTD>
*
* Permission is hereby granted for use on all ESPRESSIF SYSTEMS products, in which case,
* it is free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished
* to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
#include "string.h"
#include "esp_log.h"
#include "i2c_bus.h"
#include "es8156.h"
#include "driver/gpio.h"
#include "board.h"
#define ES8156_ADDR 0x10        /* I2C device address of the ES8156 DAC */
#define VOLUME_STEP_NUM 10      /* number of coarse volume steps exposed to callers */
static const char *TAG = "DRV8156"; /* log tag; NOTE(review): says "DRV8156", not "ES8156" - confirm intended */
static bool codec_init_flag = 0;    /* set once es8156_codec_init() has run */
static i2c_bus_handle_t i2c_handle; /* shared I2C bus handle created by i2c_init() */
// 0dB = 0xBF;
/* Register value per volume step; indexed by volume / VOLUME_STEP_NUM. */
static uint8_t reg_vol[VOLUME_STEP_NUM] = {0, 139, 151, 163, 171, 175, 179, 183, 187, 191};
/* Audio HAL dispatch table for this codec driver. */
audio_hal_func_t AUDIO_CODEC_ES8156_DEFAULT_HANDLE = {
    .audio_codec_initialize = es8156_codec_init,
    .audio_codec_deinitialize = es8156_codec_deinit,
    .audio_codec_ctrl = es8156_codec_ctrl_state,
    .audio_codec_config_iface = es8156_codec_config_i2s,
    .audio_codec_set_mute = es8156_codec_set_voice_mute,
    .audio_codec_set_volume = es8156_codec_set_voice_volume,
    .audio_codec_get_volume = es8156_codec_get_voice_volume,
    .audio_hal_lock = NULL,
    .handle = NULL,
};
/* Whether es8156_codec_init() has already run (guards double-init). */
static bool es8156_codec_initialized()
{
    return codec_init_flag;
}
/* Write one byte to an ES8156 register over the shared I2C bus.
 * Bug fix: the address argument had been corrupted to `®_addr` (an HTML
 * entity mangling of `&reg_addr`), which does not compile. */
static esp_err_t es8156_write_reg(uint8_t reg_addr, uint8_t data)
{
    return i2c_bus_write_bytes(i2c_handle, ES8156_ADDR, &reg_addr, sizeof(reg_addr), &data, sizeof(data));
}
/* Read one byte from an ES8156 register over the shared I2C bus.
 * Bug fix: the address argument had been corrupted to `®_addr` (an HTML
 * entity mangling of `&reg_addr`), which does not compile.
 * NOTE(review): the i2c read result is ignored - on failure the returned
 * value is whatever was left in `data`. */
static int es8156_read_reg(uint8_t reg_addr)
{
    uint8_t data;
    i2c_bus_read_bytes(i2c_handle, ES8156_ADDR, &reg_addr, sizeof(reg_addr), &data, sizeof(data));
    return (int)data;
}
/* Configure I2C port 0 as master (100 kHz, internal pull-ups enabled) and
 * create the shared bus handle used by all register accesses.
 * Returns the result of the board pin lookup; note the bus is still created
 * even when the pin lookup fails (only an error is logged). */
static int i2c_init()
{
    int res = 0;
    i2c_config_t es_i2c_cfg = {
        .mode = I2C_MODE_MASTER,
        .sda_pullup_en = GPIO_PULLUP_ENABLE,
        .scl_pullup_en = GPIO_PULLUP_ENABLE,
        .master.clk_speed = 100000,
    };
    res = get_i2c_pins(I2C_NUM_0, &es_i2c_cfg);
    if (res != ESP_OK) {
        ESP_LOGE(TAG, "getting i2c pins error");
    }
    i2c_handle = i2c_bus_create(I2C_NUM_0, &es_i2c_cfg);
    return res;
}
/* Put the DAC into standby via a fixed register write sequence (roughly the
 * inverse of es8156_resume()). NOTE(review): register addresses/values are
 * presumably from the ES8156 datasheet - confirm before reordering; the
 * sequence looks order-sensitive. Returns the OR of all write results. */
static esp_err_t es8156_standby(void)
{
    esp_err_t ret = 0;
    ret = es8156_write_reg(0x14, 0x00);
    ret |= es8156_write_reg(0x19, 0x02);
    ret |= es8156_write_reg(0x21, 0x1F);
    ret |= es8156_write_reg(0x22, 0x02);
    ret |= es8156_write_reg(0x25, 0x21);
    ret |= es8156_write_reg(0x25, 0xA1);
    ret |= es8156_write_reg(0x18, 0x01);
    ret |= es8156_write_reg(0x09, 0x02);
    ret |= es8156_write_reg(0x09, 0x01);
    ret |= es8156_write_reg(0x08, 0x00);
    return ret;
}
/* Bring the DAC out of standby, restoring the register values programmed in
 * es8156_codec_init() (0x14 = 179 matches the init-time volume preset).
 * Returns the OR of all write results. */
static esp_err_t es8156_resume(void)
{
    esp_err_t ret = 0;
    ret |= es8156_write_reg(0x08, 0x3F);
    ret |= es8156_write_reg(0x09, 0x00);
    ret |= es8156_write_reg(0x18, 0x00);
    ret |= es8156_write_reg(0x25, 0x20);
    ret |= es8156_write_reg(0x22, 0x00);
    ret |= es8156_write_reg(0x21, 0x3C);
    ret |= es8156_write_reg(0x19, 0x20);
    ret |= es8156_write_reg(0x14, 179);
    return ret;
}
/*
 * Drive the external power-amplifier enable GPIO.
 * Configures the board's PA pin as a plain output (no pulls) and sets it
 * high to enable the amplifier, low to disable it.
 */
void es8156_pa_power(bool enable)
{
    gpio_config_t io_conf;
    memset(&io_conf, 0, sizeof(io_conf));
    io_conf.pin_bit_mask = BIT64(get_pa_enable_gpio());
    io_conf.mode = GPIO_MODE_OUTPUT;
    io_conf.pull_up_en = 0;
    io_conf.pull_down_en = 0;
    gpio_config(&io_conf);
    gpio_set_level(get_pa_enable_gpio(), enable ? 1 : 0);
}
/* One-time codec initialization: sets up I2C, programs the startup register
 * sequence (0x14 = 179 presets the volume, per the "volume 70%" note) and
 * powers the PA on. Repeat calls are no-ops.
 * NOTE(review): codec_init_flag is set before initialization completes and
 * the results of i2c_init()/register writes are ignored, so a failed init
 * still reports ESP_OK - confirm this is intended. */
esp_err_t es8156_codec_init(audio_hal_codec_config_t *cfg)
{
    if (es8156_codec_initialized()) {
        ESP_LOGW(TAG, "The es8156 DAC has been already initialized");
        return ESP_OK;
    }
    codec_init_flag = true;
    i2c_init();
    es8156_write_reg(0x02, 0x04);
    es8156_write_reg(0x20, 0x2A);
    es8156_write_reg(0x21, 0x3C);
    es8156_write_reg(0x22, 0x00);
    es8156_write_reg(0x24, 0x07);
    es8156_write_reg(0x23, 0x00);
    es8156_write_reg(0x0A, 0x01);
    es8156_write_reg(0x0B, 0x01);
    es8156_write_reg(0x11, 0x00);
    es8156_write_reg(0x14, 179); // volume 70%
    es8156_write_reg(0x0D, 0x14);
    es8156_write_reg(0x18, 0x00);
    es8156_write_reg(0x08, 0x3F);
    es8156_write_reg(0x00, 0x02);
    es8156_write_reg(0x00, 0x03);
    es8156_write_reg(0x25, 0x20);
    es8156_pa_power(true);
    return ESP_OK;
}
/* Clears the init flag so init can run again; no hardware teardown is done. */
esp_err_t es8156_codec_deinit(void)
{
    codec_init_flag = false;
    return ESP_OK;
}
/*
 * HAL control hook: START resumes the DAC, any other control state puts it
 * into standby. The mode argument is unused.
 */
esp_err_t es8156_codec_ctrl_state(audio_hal_codec_mode_t mode, audio_hal_ctrl_t ctrl_state)
{
    if (ctrl_state == AUDIO_HAL_CTRL_START) {
        return es8156_resume();
    }
    ESP_LOGW(TAG, "The codec going to stop");
    return es8156_standby();
}
/* I2S interface configuration hook; nothing to configure here. */
esp_err_t es8156_codec_config_i2s(audio_hal_codec_mode_t mode, audio_hal_codec_i2s_iface_t *iface)
{
    return ESP_OK;
}
/* Mute or unmute the DAC output by toggling bits 1-2 of the mute register.
 * Uses read-modify-write so the register's other bits are preserved. */
esp_err_t es8156_codec_set_voice_mute(bool enable)
{
    int regv = es8156_read_reg(ES8156_DAC_MUTE_REG13);
    if (enable) {
        regv = regv | BIT(1) | BIT(2);
    } else {
        regv = regv & (~(BIT(1) | BIT(2))) ;
    }
    es8156_write_reg(ES8156_DAC_MUTE_REG13, regv);
    return ESP_OK;
}
/*
 * Set the playback volume.
 * volume: 0..100 percent (clamped); mapped onto the reg_vol[] step table,
 * one step per VOLUME_STEP_NUM percent.
 * Bug fix: the I2C write result was discarded (`ret` was never assigned), so
 * the function always reported success; the write result is now returned.
 */
esp_err_t es8156_codec_set_voice_volume(int volume)
{
    if (volume < 0) {
        volume = 0;
    } else if (volume >= 100) {
        volume = 99;
    }
    int step = volume / VOLUME_STEP_NUM;
    ESP_LOGD(TAG, "SET: volume:%d, regv:%d", volume, reg_vol[step]);
    return es8156_write_reg(ES8156_VOLUME_CONTROL_REG14, reg_vol[step]);
}
/* Read back the playback volume as a percentage step (multiples of
 * VOLUME_STEP_NUM). Register values not present in reg_vol[] - i.e. not set
 * through es8156_codec_set_voice_volume() - yield *volume = 0. */
esp_err_t es8156_codec_get_voice_volume(int *volume)
{
    int ret = 0;
    int regv = 0;
    *volume = 0;
    regv = es8156_read_reg(ES8156_VOLUME_CONTROL_REG14);
    if (regv == ESP_FAIL) {
        ret = ESP_FAIL;
    } else {
        /* Reverse-map the register value onto its step index. */
        for (int i = 0; i < VOLUME_STEP_NUM; ++i) {
            if (reg_vol[i] == regv) {
                *volume = i * VOLUME_STEP_NUM;
            }
        }
    }
    ESP_LOGD(TAG, "GET: regv:%d, volume:%d%%", regv, *volume);
    return ret;
}
|
// Setup the network sockets for the different platforms.
// Thin portability layer so the rest of the code can use the BSD/Winsock
// socket API uniformly on Windows and POSIX systems.
#ifndef SOCKETX_H
#define SOCKETX_H
#ifdef _WIN32
#include <winsock.h>
#pragma comment(lib, "ws2_32.lib") // Auto-link the Winsock 2 library on MSVC builds.
                                   // NOTE(review): <winsock.h> is the v1.1 header; consider <winsock2.h> - confirm.
typedef int socklen_t;             // POSIX type missing from the old Winsock headers.
#pragma warning(disable: 4127) // "conditional expression is constant" - triggered by FD_SET()
#else
#include <arpa/inet.h>
#include <netdb.h>
#include <netinet/in.h>
#include <sys/select.h>
#include <sys/socket.h>
#include <unistd.h>
#define closesocket(x) close(x)   // Winsock name mapped onto POSIX close().
#define INVALID_SOCKET -1
typedef int SOCKET;
#endif
#endif
|
/** @license React v0.13.4
* scheduler.production.min.js
*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
'use strict';Object.defineProperty(exports,"__esModule",{value:!0});var d=null,e=!1,g=3,k=-1,l=-1,m=!1,n=!1;function p(){if(!m){var a=d.expirationTime;n?q():n=!0;r(t,a)}}
function u(){var a=d,b=d.next;if(d===b)d=null;else{var c=d.previous;d=c.next=b;b.previous=c}a.next=a.previous=null;c=a.callback;b=a.expirationTime;a=a.priorityLevel;var f=g,Q=l;g=a;l=b;try{var h=c()}finally{g=f,l=Q}if("function"===typeof h)if(h={callback:h,priorityLevel:a,expirationTime:b,next:null,previous:null},null===d)d=h.next=h.previous=h;else{c=null;a=d;do{if(a.expirationTime>=b){c=a;break}a=a.next}while(a!==d);null===c?c=d:c===d&&(d=h,p());b=c.previous;b.next=c.previous=h;h.next=c;h.previous=
b}}function v(){if(-1===k&&null!==d&&1===d.priorityLevel){m=!0;try{do u();while(null!==d&&1===d.priorityLevel)}finally{m=!1,null!==d?p():n=!1}}}function t(a){m=!0;var b=e;e=a;try{if(a)for(;null!==d;){var c=exports.unstable_now();if(d.expirationTime<=c){do u();while(null!==d&&d.expirationTime<=c)}else break}else if(null!==d){do u();while(null!==d&&!w())}}finally{m=!1,e=b,null!==d?p():n=!1,v()}}
var x=Date,y="function"===typeof setTimeout?setTimeout:void 0,z="function"===typeof clearTimeout?clearTimeout:void 0,A="function"===typeof requestAnimationFrame?requestAnimationFrame:void 0,B="function"===typeof cancelAnimationFrame?cancelAnimationFrame:void 0,C,D;function E(a){C=A(function(b){z(D);a(b)});D=y(function(){B(C);a(exports.unstable_now())},100)}
if("object"===typeof performance&&"function"===typeof performance.now){var F=performance;exports.unstable_now=function(){return F.now()}}else exports.unstable_now=function(){return x.now()};var r,q,w,G=null;"undefined"!==typeof window?G=window:"undefined"!==typeof global&&(G=global);
if(G&&G._schedMock){var H=G._schedMock;r=H[0];q=H[1];w=H[2];exports.unstable_now=H[3]}else if("undefined"===typeof window||"function"!==typeof MessageChannel){var I=null,J=function(a){if(null!==I)try{I(a)}finally{I=null}};r=function(a){null!==I?setTimeout(r,0,a):(I=a,setTimeout(J,0,!1))};q=function(){I=null};w=function(){return!1}}else{"undefined"!==typeof console&&("function"!==typeof A&&console.error("This browser doesn't support requestAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills"),
"function"!==typeof B&&console.error("This browser doesn't support cancelAnimationFrame. Make sure that you load a polyfill in older browsers. https://fb.me/react-polyfills"));var K=null,L=!1,M=-1,N=!1,O=!1,P=0,R=33,S=33;w=function(){return P<=exports.unstable_now()};var T=new MessageChannel,U=T.port2;T.port1.onmessage=function(){L=!1;var a=K,b=M;K=null;M=-1;var c=exports.unstable_now(),f=!1;if(0>=P-c)if(-1!==b&&b<=c)f=!0;else{N||(N=!0,E(V));K=a;M=b;return}if(null!==a){O=!0;try{a(f)}finally{O=!1}}};
var V=function(a){if(null!==K){E(V);var b=a-P+S;b<S&&R<S?(8>b&&(b=8),S=b<R?R:b):R=b;P=a+S;L||(L=!0,U.postMessage(void 0))}else N=!1};r=function(a,b){K=a;M=b;O||0>b?U.postMessage(void 0):N||(N=!0,E(V))};q=function(){K=null;L=!1;M=-1}}exports.unstable_ImmediatePriority=1;exports.unstable_UserBlockingPriority=2;exports.unstable_NormalPriority=3;exports.unstable_IdlePriority=5;exports.unstable_LowPriority=4;
exports.unstable_runWithPriority=function(a,b){switch(a){case 1:case 2:case 3:case 4:case 5:break;default:a=3}var c=g,f=k;g=a;k=exports.unstable_now();try{return b()}finally{g=c,k=f,v()}};exports.unstable_next=function(a){switch(g){case 1:case 2:case 3:var b=3;break;default:b=g}var c=g,f=k;g=b;k=exports.unstable_now();try{return a()}finally{g=c,k=f,v()}};
exports.unstable_scheduleCallback=function(a,b){var c=-1!==k?k:exports.unstable_now();if("object"===typeof b&&null!==b&&"number"===typeof b.timeout)b=c+b.timeout;else switch(g){case 1:b=c+-1;break;case 2:b=c+250;break;case 5:b=c+1073741823;break;case 4:b=c+1E4;break;default:b=c+5E3}a={callback:a,priorityLevel:g,expirationTime:b,next:null,previous:null};if(null===d)d=a.next=a.previous=a,p();else{c=null;var f=d;do{if(f.expirationTime>b){c=f;break}f=f.next}while(f!==d);null===c?c=d:c===d&&(d=a,p());
b=c.previous;b.next=c.previous=a;a.next=c;a.previous=b}return a};exports.unstable_cancelCallback=function(a){var b=a.next;if(null!==b){if(b===a)d=null;else{a===d&&(d=b);var c=a.previous;c.next=b;b.previous=c}a.next=a.previous=null}};exports.unstable_wrapCallback=function(a){var b=g;return function(){var c=g,f=k;g=b;k=exports.unstable_now();try{return a.apply(this,arguments)}finally{g=c,k=f,v()}}};exports.unstable_getCurrentPriorityLevel=function(){return g};
exports.unstable_shouldYield=function(){return!e&&(null!==d&&d.expirationTime<l||w())};exports.unstable_continueExecution=function(){null!==d&&p()};exports.unstable_pauseExecution=function(){};exports.unstable_getFirstCallbackNode=function(){return d};
|
'''
Progress class for modules. Represents where a student is in a module.
For most subclassing needs, you should only need to reimplement
frac() and __str__().
'''
import numbers
class Progress:  # pylint: disable=eq-without-hash
    """A fraction a/b describing how much of a module is complete.

    Both values must be numeric (not necessarily integral) and satisfy
    0 <= a <= b with b > 0.  Modules for which progress is meaningless
    (e.g. html) should return None from get_progress() instead of a
    Progress instance.

    Subclasses usually only need to override frac() and __str__().

    TODO: add tag for module type? Would allow for smarter merging.
    """

    def __init__(self, a, b):
        """Validate and store the fraction a/b.

        Raises TypeError when either value is non-numeric and ValueError
        when the denominator is not positive.  The numerator is clamped
        into [0, b].
        """
        # All sanity checking happens here, so the accessors below can
        # assume a well-formed fraction.
        if not (isinstance(a, numbers.Number) and
                isinstance(b, numbers.Number)):
            raise TypeError(f'a and b must be numbers. Passed {a}/{b}')
        a = min(a, b)
        a = max(a, 0)
        if b <= 0:
            raise ValueError(f'fraction a/b = {a}/{b} must have b > 0')
        self._a = a
        self._b = b

    def frac(self):
        """Return the stored fraction as a (numerator, denominator) tuple."""
        return (self._a, self._b)

    def percent(self):
        """Return the progress as a float percentage in [0, 100].

        Implemented in terms of frac(); relies on construction-time checks.
        """
        numerator, denominator = self.frac()
        return 100.0 * numerator / denominator

    def started(self):
        """True once any progress has been made (numerator > 0)."""
        return self.frac()[0] > 0

    def inprogress(self):
        """True when progress is strictly between "none" and "done"."""
        numerator, denominator = self.frac()
        return 0 < numerator < denominator

    def done(self):
        """True when the numerator has reached the denominator."""
        numerator, denominator = self.frac()
        return numerator == denominator

    def ternary_str(self):
        """Collapse the fraction to one of "none", "in_progress", "done"."""
        numerator, denominator = self.frac()
        if numerator == 0:
            return "none"
        return "in_progress" if numerator < denominator else "done"

    def __eq__(self, other):
        """Progress objects are equal when their fractions are identical."""
        if not isinstance(other, Progress):
            return False
        return self.frac() == other.frac()

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self.__eq__(other)

    def __str__(self):
        """Render as "a/b" with each number rounded to at most two decimal
        places and trailing zeroes (and a dangling dot) stripped."""
        def trim(value):
            return f'{value:.2f}'.rstrip('0').rstrip('.')
        numerator, denominator = self.frac()
        return f"{trim(numerator)}/{trim(denominator)}"

    @staticmethod
    def add_counts(a, b):
        """Merge two progress values item-wise:
        (a / b) + (c / d) = (a + c) / (b + d).

        Either argument may be None, in which case the other is returned
        unchanged.
        """
        if a is None:
            return b
        if b is None:
            return a
        n1, d1 = a.frac()
        n2, d2 = b.frac()
        return Progress(n1 + n2, d1 + d2)
|
import logging
from gym.envs.registration import register
# Module-level logger for this registration module.
logger = logging.getLogger(__name__)

# Make the EN->FR translation environment available via gym.make("EnFrSNmtEnv-v0").
register(
    entry_point="fairseq.RL.env:SNmtEnv",
    id="EnFrSNmtEnv-v0",
)
|
#ifndef Podd_BdataLoc_h_
#define Podd_BdataLoc_h_
//////////////////////////////////////////////////////////////////////////
//
// BdataLoc
//
//////////////////////////////////////////////////////////////////////////
#include "THaAnalysisObject.h"
#include "TString.h"
#include <vector>
#include <cassert>
#include <set>
class THaEvData;
class TObjArray;
//___________________________________________________________________________
class BdataLoc : public TNamed {
  // Utility class used by THaDecData.
  // Data location, either in (crates, slots, channel), or
  // relative to a unique header in a crate or in an event.
public:
  // Helper class for holding info on BdataLoc classes
  class BdataLocType {
  public:
    BdataLocType( const char* cl, const char* key, Int_t np, void* ptr = nullptr )
      : fClassName(cl), fDBkey(key), fNparams(np), fOptptr(ptr), fTClass(nullptr) {}
    // The database keys have to be unique, so use them for sorting
    bool operator<( const BdataLocType& rhs ) const { return fDBkey < rhs.fDBkey; }
    const char* fClassName; // Name of class to use for this data type
    const char* fDBkey;     // Database key name to search for definitions
    Int_t fNparams;         // Number of database parameters for this type
    void* fOptptr;          // Optional pointer to arbitrary data
    mutable TClass* fTClass; // Pointer to ROOT class representing the type
  };
  typedef std::set<BdataLocType> TypeSet_t;
  typedef TypeSet_t::iterator TypeIter_t;
  // Returns set of all defined (i.e. compiled & loaded) BdataLoc classes
  static TypeSet_t& fgBdataLocTypes();

  BdataLoc() : crate(0), data(0) {} // For ROOT TClass & I/O
  virtual ~BdataLoc();
  // Main function: extract the defined data from the event
  virtual void Load( const THaEvData& evt ) = 0;
  // Initialization from TObjString parameters in TObjArray
  virtual Int_t Configure( const TObjArray* params, Int_t start = 0 );
  // Type-specific data
  virtual Int_t GetNparams() const = 0;
  virtual const char* GetTypeKey() const = 0;
  // Optional data passed in via generic pointer
  virtual Int_t OptionPtr( void* ) { return 0; }
  // kMaxUInt acts as the "no data loaded" sentinel; Clear() resets to it
  // and DidLoad() tests against it.
  virtual void Clear( const Option_t* ="" ) { data = kMaxUInt; }
  virtual Bool_t DidLoad() const { return (data != kMaxUInt); }
  virtual UInt_t NumHits() const { return DidLoad() ? 1 : 0; }
  // Single-hit accessor: asserts that data was loaded and i == 0
  virtual UInt_t Get( UInt_t i = 0 ) const { assert(DidLoad() && i == 0); return data; }
  virtual void Print( Option_t* opt="" ) const;
  //TODO: Needed?
  Bool_t operator==( const char* aname ) const { return fName == aname; }
  // operator== and != compare the hardware definitions of two BdataLoc's
  // virtual Bool_t operator==( const BdataLoc& rhs ) const
  // { return (crate == rhs.crate); }
  // Bool_t operator!=( const BdataLoc& rhs ) const { return !(*this==rhs); }
  typedef THaAnalysisObject::EMode EMode;
  virtual Int_t DefineVariables( EMode mode = THaAnalysisObject::kDefine );
  // Helper function for parameter parsing
  static TString& GetString( const TObjArray* params, Int_t pos )
  { return Podd::GetObjArrayString(params, pos); }
protected:
  // Abstract base class constructor
  BdataLoc( const char* name, UInt_t cra )
    : TNamed(name,name), crate(cra), data(kMaxUInt) { }
  UInt_t crate; // Data location: crate number
  UInt_t data;  // Raw data word
  Int_t CheckConfigureParams( const TObjArray* params, Int_t start ) const;
  void PrintNameType( Option_t* opt="" ) const;
  // Adds a concrete type's registration record to fgBdataLocTypes()
  static TypeIter_t DoRegister( const BdataLocType& registration_info );
  // Bit used by DefineVariables
  enum { kIsSetup = BIT(14) };
  ClassDef(BdataLoc,0)
};
//___________________________________________________________________________
// Single datum addressed by (crate, slot, channel)
class CrateLoc : public BdataLoc {
public:
  // c'tor for (crate,slot,channel) selection
  CrateLoc( const char* nm, UInt_t cra, UInt_t slo, UInt_t cha )
    : BdataLoc(nm,cra), slot(slo), chan(cha) { ResetBit(kIsSetup); }
  CrateLoc() : slot(0), chan(0) {}
  virtual ~CrateLoc() = default;
  virtual void Load( const THaEvData& evt );
  virtual Int_t Configure( const TObjArray* params, Int_t start = 0 );
  // Type metadata comes from this class's static registration record
  virtual Int_t GetNparams() const { return fgThisType->fNparams; }
  virtual const char* GetTypeKey() const { return fgThisType->fDBkey; };
  virtual void Print( Option_t* opt="" ) const;
  // virtual Bool_t operator==( const BdataLoc& rhs ) const
  // { return (crate == rhs.crate && slot == rhs.slot && chan == rhs.chan); }
protected:
  UInt_t slot, chan; // Data location: slot and channel
  void PrintCrateLocHeader( Option_t* opt="" ) const;
private:
  static TypeIter_t fgThisType; // Entry in the BdataLoc type registry
  ClassDef(CrateLoc,0)
};
//___________________________________________________________________________
// Like CrateLoc, but keeps every hit on the channel, not just one
class CrateLocMulti : public CrateLoc {
public:
  // (crate,slot,channel) allowing for multiple hits per channel
  CrateLocMulti( const char* nm, UInt_t cra, UInt_t slo, UInt_t cha )
    : CrateLoc(nm,cra,slo,cha) { }
  CrateLocMulti() = default;
  virtual ~CrateLocMulti() = default;
  virtual void Load( const THaEvData& evt );
  // Clears both the base class's single datum and the hit vector
  virtual void Clear( const Option_t* ="" ) { CrateLoc::Clear(); rdata.clear(); }
  virtual UInt_t NumHits() const { return rdata.size(); }
  // i-th hit; std::vector::at throws std::out_of_range for bad indices
  virtual UInt_t Get( UInt_t i = 0 ) const { return rdata.at(i); }
  virtual Int_t GetNparams() const { return fgThisType->fNparams; }
  virtual const char* GetTypeKey() const { return fgThisType->fDBkey; };
  virtual void Print( Option_t* opt="" ) const;
  virtual Int_t DefineVariables( EMode mode = THaAnalysisObject::kDefine );
protected:
  std::vector<UInt_t> rdata; // raw data, one entry per hit
  void PrintMultiData( Option_t* opt="" ) const;
private:
  static TypeIter_t fgThisType; // Entry in the BdataLoc type registry
  ClassDef(CrateLocMulti,0)
};
//___________________________________________________________________________
// Datum located relative to a unique header word within a crate
class WordLoc : public BdataLoc {
public:
  // c'tor for header search
  WordLoc( const char* nm, UInt_t cra, UInt_t head, UInt_t skip )
    : BdataLoc(nm,cra), header(head), ntoskip(skip) { }
  WordLoc() : header(0), ntoskip(1) {}
  virtual ~WordLoc() = default;
  virtual void Load( const THaEvData& evt );
  virtual Int_t Configure( const TObjArray* params, Int_t start = 0 );
  // Type metadata comes from this class's static registration record
  virtual Int_t GetNparams() const { return fgThisType->fNparams; }
  virtual const char* GetTypeKey() const { return fgThisType->fDBkey; };
  virtual void Print( Option_t* opt="" ) const;
  // virtual Bool_t operator==( const BdataLoc& rhs ) const
  // { return (crate == rhs.crate &&
  //	       header == rhs.header && ntoskip == rhs.ntoskip); }
protected:
  UInt_t header;  // header (unique either in data or in crate)
  UInt_t ntoskip; // how far to skip beyond header
private:
  static TypeIter_t fgThisType; // Entry in the BdataLoc type registry
  ClassDef(WordLoc,0)
};
//___________________________________________________________________________
// Reports the event length of a given crate (ROC)
class RoclenLoc : public BdataLoc {
public:
  // Event length of a crate
  RoclenLoc( const char* nm, UInt_t cra ) : BdataLoc(nm, cra) { }
  RoclenLoc() = default;
  virtual ~RoclenLoc() = default;
  virtual void Load( const THaEvData& evt );
  // Type metadata comes from this class's static registration record
  virtual Int_t GetNparams() const { return fgThisType->fNparams; }
  virtual const char* GetTypeKey() const { return fgThisType->fDBkey; };
private:
  static TypeIter_t fgThisType; // Entry in the BdataLoc type registry
  ClassDef(RoclenLoc,0)
};
///////////////////////////////////////////////////////////////////////////////
#endif
|
/** @jest-environment jsdom */
import { shallow } from '@vue/test-utils';
import PostCss from './fixtures/VuePostCss';
describe('processes .vue file with PostCSS style', () => {
  it('does not error on pcss/postcss', () => {
    // Shallow-render the fixture and verify its PostCSS-defined class applied.
    const mounted = shallow(PostCss);
    const appliedClasses = mounted.classes();
    expect(appliedClasses).toContain('testPcss');
  });
});
|
import urllib.request, urllib.parse, urllib.error
import xml.etree.ElementTree as ET
# Source document with per-comment counts.
# NOTE(review): the ".xmls" extension looks like a typo for ".xml", but it is
# a runtime URL for this specific data set -- verify against the assignment
# before changing it.
url = 'http://py4e-data.dr-chuck.net/comments_128455.xmls'

# Fetch the document; the context manager closes the connection (the original
# never closed the response, leaking the socket).
with urllib.request.urlopen(url) as uh:
    data = uh.read()

tree = ET.fromstring(data)

# Each <comment> element under <comments> carries a <count> child whose text
# is an integer.
counts = tree.findall('comments/comment')
print(len(counts))

# Accumulate with the built-in sum(); the original shadowed the builtin by
# naming its accumulator "sum".
total = sum(int(item.find('count').text) for item in counts)
print(total)
|
""" Benchmark linalg.sqrtm for various blocksizes.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.testing import assert_allclose
import scipy.linalg
class Sqrtm(object):
    """asv benchmark: scipy.linalg.sqrtm across dtypes, sizes and blocksizes."""

    # Parameter grid: dtype x matrix size x recursion blocksize.
    params = [
        ['float64', 'complex128'],
        [64, 256],
        [32, 64, 256]
    ]
    param_names = ['dtype', 'n', 'blocksize']
    goal_time = 0.5

    def setup(self, dtype, n, blocksize):
        """Build a random n x n matrix of the requested dtype in self.A."""
        n = int(n)
        dtype = np.dtype(dtype)
        blocksize = int(blocksize)
        matrix = np.random.rand(n, n)
        if dtype == np.complex128:
            matrix = matrix + 1j*np.random.rand(n, n)
        self.A = matrix
        # A blocksize larger than the matrix makes no sense; raising here
        # tells asv to skip this parameter combination.
        if blocksize > n:
            raise NotImplementedError()

    def time_sqrtm(self, dtype, n, blocksize):
        """Timed body: matrix square root with the given blocksize."""
        scipy.linalg.sqrtm(self.A, disp=False, blocksize=blocksize)
|
from dagster import execute_pipeline
def test_example_shell_command_solid():
    """Smoke-test: the shell-command example pipeline runs and echoes hello."""
    from .example_shell_command_solid import pipe

    outcome = execute_pipeline(pipe)
    assert outcome.success
    assert outcome.result_for_solid('a').output_value() == 'hello, world!\n'
def test_example_shell_script_solid():
    """Smoke-test: the shell-script example pipeline runs and echoes hello."""
    from .example_shell_script_solid import pipe

    outcome = execute_pipeline(pipe)
    assert outcome.success
    assert outcome.result_for_solid('a').output_value() == 'hello, world!\n'
|
// Load .env BEFORE any process.env reads; the original read process.env.PORT
// before calling dotenv.config(), so a PORT defined in .env was ignored.
require("dotenv").config();

const express = require("express");
const exphbs = require("express-handlebars");

const PORT = process.env.PORT || 8080;
const app = express();

// Static assets and request-body parsing.
app.use(express.static("public"));
app.use(express.urlencoded({ extended: true }));
app.use(express.json());

// Handlebars view engine with the default "main" layout.
app.engine("handlebars", exphbs({ defaultLayout: "main" }));
app.set("view engine", "handlebars");

// Application routes.
const routes = require("./controllers/burgerController.js");
app.use(routes);

app.listen(PORT, () => {
  console.log("Server listening on: http://localhost:" + PORT);
});
|
/*++
Copyright (c) 1986-1997 Microsoft Corporation
Module Name:
stireg.h
Abstract:
This module contains the STI registry entries
Author:
Revision History:
--*/
#ifndef _STIREG_
#define _STIREG_
#include <winapifamily.h>
#pragma region Desktop Family
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
//
// Registry keys and values
//
// "_W" names are wide-character literals; the TEXT() variants follow the
// build's UNICODE setting.
//
#define REGSTR_VAL_TYPE_W L"Type"
#define REGSTR_VAL_VENDOR_NAME_W L"Vendor"
#define REGSTR_VAL_DEVICETYPE_W L"DeviceType"
#define REGSTR_VAL_DEVICESUBTYPE_W L"DeviceSubType"
#define REGSTR_VAL_DEV_NAME_W L"DeviceName"
#define REGSTR_VAL_DRIVER_DESC_W L"DriverDesc"
#define REGSTR_VAL_FRIENDLY_NAME_W L"FriendlyName"
#define REGSTR_VAL_GENERIC_CAPS_W L"Capabilities"
#define REGSTR_VAL_HARDWARE_W L"HardwareConfig"
#define REGSTR_VAL_HARDWARE TEXT("HardwareConfig")
// Note: maps to the same registry value name as REGSTR_VAL_DRIVER_DESC_W.
#define REGSTR_VAL_DEVICE_NAME_W L"DriverDesc"
#define REGSTR_VAL_DATA_W L"DeviceData"
#define REGSTR_VAL_GUID_W L"GUID"
#define REGSTR_VAL_GUID TEXT("GUID")
#define REGSTR_VAL_LAUNCH_APPS_W L"LaunchApplications"
#define REGSTR_VAL_LAUNCH_APPS TEXT("LaunchApplications")
#define REGSTR_VAL_LAUNCHABLE_W L"Launchable"
#define REGSTR_VAL_LAUNCHABLE TEXT("Launchable")
#if (_WIN32_WINNT >= 0x0600) // Windows Vista and later
#define REGSTR_VAL_SHUTDOWNDELAY_W L"ShutdownIfUnusedDelay"
#define REGSTR_VAL_SHUTDOWNDELAY TEXT("ShutdownIfUnusedDelay")
#endif //#if (_WIN32_WINNT >= 0x0600)
#if (_WIN32_WINNT >= 0x0501) // Windows XP and later
//
// CustomDeviceProperty names and values
//
#define IS_DIGITAL_CAMERA_STR L"IsDigitalCamera"
#define IS_DIGITAL_CAMERA_VAL 1
#define SUPPORTS_MSCPLUS_STR L"SupportsMSCPlus"
#define SUPPORTS_MSCPLUS_VAL 1
#endif //#if (_WIN32_WINNT >= 0x0501)
//
// Device instance value names
//
// Wide-character forms first, followed by their ANSI ("_A") counterparts.
//
#define STI_DEVICE_VALUE_TWAIN_NAME L"TwainDS"
#define STI_DEVICE_VALUE_ISIS_NAME L"ISISDriverName"
#define STI_DEVICE_VALUE_ICM_PROFILE L"ICMProfile"
#define STI_DEVICE_VALUE_DEFAULT_LAUNCHAPP L"DefaultLaunchApp"
#define STI_DEVICE_VALUE_TIMEOUT L"PollTimeout"
#define STI_DEVICE_VALUE_DISABLE_NOTIFICATIONS L"DisableNotifications"
#define REGSTR_VAL_BAUDRATE L"BaudRate"
#define STI_DEVICE_VALUE_TWAIN_NAME_A "TwainDS"
#define STI_DEVICE_VALUE_ISIS_NAME_A "ISISDriverName"
#define STI_DEVICE_VALUE_ICM_PROFILE_A "ICMProfile"
#define STI_DEVICE_VALUE_DEFAULT_LAUNCHAPP_A "DefaultLaunchApp"
#define STI_DEVICE_VALUE_TIMEOUT_A "PollTimeout"
#define STI_DEVICE_VALUE_DISABLE_NOTIFICATIONS_A "DisableNotifications"
#define REGSTR_VAL_BAUDRATE_A "BaudRate"
//
// DEVPKEY_WIA_DeviceType
//
// The property GUID is the same as for the Imaging device class: {6BDD1FC6-810F-11D0-BEC7-08002BE2092F}
//
// Namespace: System.Devices.WiaDeviceType
//
// Type: DEVPROP_TYPE_UINT32
//
// This property can be set to any of the STI_DEVICE_TYPE enumeration values (see sti.h), including:
//
// 1 - StiDeviceTypeScanner (still image scanner: STI, WIA 1.0 or WIA 2.0)
// 2 - StiDeviceTypeDigitalCamera (still digital camera, WIA 1.0)
// 3 - StiDeviceTypeStreamingVideo (video streaming device such as webcam, still of the Imaging class,
//     formerly serviced by WIA Video but no longer STI/WIA compliant)
//
// For example a modern app can use this property to enumerate WIA scanner devices:
//
// System.Devices.InterfaceClassGuid:="{6bdd1fc6-810f-11d0-bec7-08002be2092f}" AND System.Devices.WiaDeviceType:=1
//
// ... where {6bdd1fc6-810f-11d0-bec7-08002be2092f} is the GUID of the Imaging device class and 1 is StiDeviceTypeScanner.
// Also note in this example that the app accesses the property via its namespace (System.Devices.WiaDeviceType), not via
// its internal DEVPKEY definition.
//
#include <devpropdef.h>
DEFINE_DEVPROPKEY(DEVPKEY_WIA_DeviceType, 0x6bdd1fc6, 0x810f, 0x11d0, 0xbe, 0xc7, 0x08, 0x00, 0x2b, 0xe2, 0x09, 0x2f, 2);
#endif /* WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) */
#pragma endregion
#endif // _STIREG_
|
/**
* @fileoverview added by tsickle
* @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
*/
import { Component, EventEmitter, forwardRef, Host, Input, Output } from '@angular/core';
import { Raster } from 'ol/source';
import { RasterOperationType } from 'ol/source/Raster';
import { LayerImageComponent } from '../layers/layerimage.component';
import { SourceComponent } from './source.component';
import { Operation } from 'ol/source/Raster';
export class SourceRasterComponent extends SourceComponent {
    /**
     * @param {?} layer - host image layer component this source registers with
     */
    constructor(layer) {
        super(layer);
        this.beforeOperations = new EventEmitter();
        this.afterOperations = new EventEmitter();
        this.sources = [];
    }
    /**
     * Angular lifecycle hook: builds the OL instance once projected content exists.
     * @return {?}
     */
    ngAfterContentInit() {
        this.init();
    }
    /**
     * Creates the ol Raster source from this component's inputs, forwards its
     * before/after-operations events to the Angular outputs, and registers it
     * with the host layer.
     * @return {?}
     */
    init() {
        this.instance = new Raster(this);
        this.instance.on('beforeoperations', (/**
         * @param {?} event
         * @return {?}
         */
        (event) => this.beforeOperations.emit(event)));
        this.instance.on('afteroperations', (/**
         * @param {?} event
         * @return {?}
         */
        (event) => this.afterOperations.emit(event)));
        this._register(this.instance);
    }
}
// tsickle-generated Angular metadata; regenerate from the .ts source rather
// than editing by hand.
SourceRasterComponent.decorators = [
    { type: Component, args: [{
                selector: 'aol-source-raster',
                template: `
    <ng-content></ng-content>
  `,
                providers: [
                    {
                        provide: SourceComponent,
                        useExisting: forwardRef((/**
                         * @return {?}
                         */
                        () => SourceRasterComponent)),
                    },
                ]
            }] }
];
/** @nocollapse */
// Constructor DI metadata: the LayerImageComponent is injected via @Host().
SourceRasterComponent.ctorParameters = () => [
    { type: LayerImageComponent, decorators: [{ type: Host }] }
];
// Input/Output property metadata mirrored from the original decorators.
SourceRasterComponent.propDecorators = {
    operation: [{ type: Input }],
    threads: [{ type: Input }],
    lib: [{ type: Input }],
    operationType: [{ type: Input }],
    beforeOperations: [{ type: Output }],
    afterOperations: [{ type: Output }]
};
// Dead branch kept by tsickle purely for type information; never executes.
if (false) {
    /** @type {?} */
    SourceRasterComponent.prototype.instance;
    /** @type {?} */
    SourceRasterComponent.prototype.operation;
    /** @type {?} */
    SourceRasterComponent.prototype.threads;
    /** @type {?} */
    SourceRasterComponent.prototype.lib;
    /** @type {?} */
    SourceRasterComponent.prototype.operationType;
    /** @type {?} */
    SourceRasterComponent.prototype.beforeOperations;
    /** @type {?} */
    SourceRasterComponent.prototype.afterOperations;
    /** @type {?} */
    SourceRasterComponent.prototype.sources;
}
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicmFzdGVyLmNvbXBvbmVudC5qcyIsInNvdXJjZVJvb3QiOiJuZzovL25neC1vcGVubGF5ZXJzLyIsInNvdXJjZXMiOlsibGliL3NvdXJjZXMvcmFzdGVyLmNvbXBvbmVudC50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiOzs7O0FBQUEsT0FBTyxFQUFvQixTQUFTLEVBQUUsWUFBWSxFQUFFLFVBQVUsRUFBRSxJQUFJLEVBQUUsS0FBSyxFQUFFLE1BQU0sRUFBRSxNQUFNLGVBQWUsQ0FBQztBQUMzRyxPQUFPLEVBQUUsTUFBTSxFQUFVLE1BQU0sV0FBVyxDQUFDO0FBQzNDLE9BQU8sRUFBRSxtQkFBbUIsRUFBcUIsTUFBTSxrQkFBa0IsQ0FBQztBQUMxRSxPQUFPLEVBQUUsbUJBQW1CLEVBQUUsTUFBTSxnQ0FBZ0MsQ0FBQztBQUNyRSxPQUFPLEVBQUUsZUFBZSxFQUFFLE1BQU0sb0JBQW9CLENBQUM7QUFDckQsT0FBTyxFQUFFLFNBQVMsRUFBRSxNQUFNLGtCQUFrQixDQUFDO0FBYzdDLE1BQU0sT0FBTyxxQkFBc0IsU0FBUSxlQUFlOzs7O0lBbUJ4RCxZQUFvQixLQUEwQjtRQUM1QyxLQUFLLENBQUMsS0FBSyxDQUFDLENBQUM7UUFQZixxQkFBZ0IsR0FBb0MsSUFBSSxZQUFZLEVBQXFCLENBQUM7UUFFMUYsb0JBQWUsR0FBb0MsSUFBSSxZQUFZLEVBQXFCLENBQUM7UUFFekYsWUFBTyxHQUFhLEVBQUUsQ0FBQztJQUl2QixDQUFDOzs7O0lBRUQsa0JBQWtCO1FBQ2hCLElBQUksQ0FBQyxJQUFJLEVBQUUsQ0FBQztJQUNkLENBQUM7Ozs7SUFFRCxJQUFJO1FBQ0YsSUFBSSxDQUFDLFFBQVEsR0FBRyxJQUFJLE1BQU0sQ0FBQyxJQUFJLENBQUMsQ0FBQztRQUNqQyxJQUFJLENBQUMsUUFBUSxDQUFDLEVBQUUsQ0FBQyxrQkFBa0I7Ozs7UUFBRSxDQUFDLEtBQXdCLEVBQUUsRUFBRSxDQUFDLElBQUksQ0FBQyxnQkFBZ0IsQ0FBQyxJQUFJLENBQUMsS0FBSyxDQUFDLEVBQUMsQ0FBQztRQUN0RyxJQUFJLENBQUMsUUFBUSxDQUFDLEVBQUUsQ0FBQyxpQkFBaUI7Ozs7UUFBRSxDQUFDLEtBQXdCLEVBQUUsRUFBRSxDQUFDLElBQUksQ0FBQyxlQUFlLENBQUMsSUFBSSxDQUFDLEtBQUssQ0FBQyxFQUFDLENBQUM7UUFDcEcsSUFBSSxDQUFDLFNBQVMsQ0FBQyxJQUFJLENBQUMsUUFBUSxDQUFDLENBQUM7SUFDaEMsQ0FBQzs7O1lBNUNGLFNBQVMsU0FBQztnQkFDVCxRQUFRLEVBQUUsbUJBQW1CO2dCQUM3QixRQUFRLEVBQUU7O0dBRVQ7Z0JBQ0QsU0FBUyxFQUFFO29CQUNUO3dCQUNFLE9BQU8sRUFBRSxlQUFlO3dCQUN4QixXQUFXLEVBQUUsVUFBVTs7O3dCQUFDLEdBQUcsRUFBRSxDQUFDLHFCQUFxQixFQUFDO3FCQUNyRDtpQkFDRjthQUNGOzs7O1lBZlEsbUJBQW1CLHVCQW1DYixJQUFJOzs7d0JBaEJoQixLQUFLO3NCQUVMLEtBQUs7a0JBRUwsS0FBSzs0QkFFTCxLQUFLOytCQUdMLE1BQU07OEJBRU4sTUFBTTs7OztJQWJQLHlDQUFpQjs7SUFFakIsMENBQ3NCOztJQUN0Qix3Q0FDaUI7O0lBQ2pCLG9DQUNhOztJQUNiLDhDQUNvQzs7SUFFcEMsaURBQz
BGOztJQUMxRixnREFDeUY7O0lBRXpGLHdDQUF1QiIsInNvdXJjZXNDb250ZW50IjpbImltcG9ydCB7IEFmdGVyQ29udGVudEluaXQsIENvbXBvbmVudCwgRXZlbnRFbWl0dGVyLCBmb3J3YXJkUmVmLCBIb3N0LCBJbnB1dCwgT3V0cHV0IH0gZnJvbSAnQGFuZ3VsYXIvY29yZSc7XG5pbXBvcnQgeyBSYXN0ZXIsIFNvdXJjZSB9IGZyb20gJ29sL3NvdXJjZSc7XG5pbXBvcnQgeyBSYXN0ZXJPcGVyYXRpb25UeXBlLCBSYXN0ZXJTb3VyY2VFdmVudCB9IGZyb20gJ29sL3NvdXJjZS9SYXN0ZXInO1xuaW1wb3J0IHsgTGF5ZXJJbWFnZUNvbXBvbmVudCB9IGZyb20gJy4uL2xheWVycy9sYXllcmltYWdlLmNvbXBvbmVudCc7XG5pbXBvcnQgeyBTb3VyY2VDb21wb25lbnQgfSBmcm9tICcuL3NvdXJjZS5jb21wb25lbnQnO1xuaW1wb3J0IHsgT3BlcmF0aW9uIH0gZnJvbSAnb2wvc291cmNlL1Jhc3Rlcic7XG5cbkBDb21wb25lbnQoe1xuICBzZWxlY3RvcjogJ2FvbC1zb3VyY2UtcmFzdGVyJyxcbiAgdGVtcGxhdGU6IGBcbiAgICA8bmctY29udGVudD48L25nLWNvbnRlbnQ+XG4gIGAsXG4gIHByb3ZpZGVyczogW1xuICAgIHtcbiAgICAgIHByb3ZpZGU6IFNvdXJjZUNvbXBvbmVudCxcbiAgICAgIHVzZUV4aXN0aW5nOiBmb3J3YXJkUmVmKCgpID0+IFNvdXJjZVJhc3RlckNvbXBvbmVudCksXG4gICAgfSxcbiAgXSxcbn0pXG5leHBvcnQgY2xhc3MgU291cmNlUmFzdGVyQ29tcG9uZW50IGV4dGVuZHMgU291cmNlQ29tcG9uZW50IGltcGxlbWVudHMgQWZ0ZXJDb250ZW50SW5pdCB7XG4gIGluc3RhbmNlOiBSYXN0ZXI7XG5cbiAgQElucHV0KClcbiAgb3BlcmF0aW9uPzogT3BlcmF0aW9uO1xuICBASW5wdXQoKVxuICB0aHJlYWRzPzogbnVtYmVyO1xuICBASW5wdXQoKVxuICBsaWI/OiBPYmplY3Q7XG4gIEBJbnB1dCgpXG4gIG9wZXJhdGlvblR5cGU/OiBSYXN0ZXJPcGVyYXRpb25UeXBlO1xuXG4gIEBPdXRwdXQoKVxuICBiZWZvcmVPcGVyYXRpb25zOiBFdmVudEVtaXR0ZXI8UmFzdGVyU291cmNlRXZlbnQ+ID0gbmV3IEV2ZW50RW1pdHRlcjxSYXN0ZXJTb3VyY2VFdmVudD4oKTtcbiAgQE91dHB1dCgpXG4gIGFmdGVyT3BlcmF0aW9uczogRXZlbnRFbWl0dGVyPFJhc3RlclNvdXJjZUV2ZW50PiA9IG5ldyBFdmVudEVtaXR0ZXI8UmFzdGVyU291cmNlRXZlbnQ+KCk7XG5cbiAgc291cmNlczogU291cmNlW10gPSBbXTtcblxuICBjb25zdHJ1Y3RvcihASG9zdCgpIGxheWVyOiBMYXllckltYWdlQ29tcG9uZW50KSB7XG4gICAgc3VwZXIobGF5ZXIpO1xuICB9XG5cbiAgbmdBZnRlckNvbnRlbnRJbml0KCkge1xuICAgIHRoaXMuaW5pdCgpO1xuICB9XG5cbiAgaW5pdCgpIHtcbiAgICB0aGlzLmluc3RhbmNlID0gbmV3IFJhc3Rlcih0aGlzKTtcbiAgICB0aGlzLmluc3RhbmNlLm9uKCdiZWZvcmVvcGVyYXRpb25zJywgKGV2ZW50OiBSYXN0ZXJTb3VyY2VFdmVudCkgPT4gdGhpcy5iZWZvcmVPcGVyYXRpb25zLmVtaXQoZXZlbnQpKTtcbiAgICB0aGlzLm
luc3RhbmNlLm9uKCdhZnRlcm9wZXJhdGlvbnMnLCAoZXZlbnQ6IFJhc3RlclNvdXJjZUV2ZW50KSA9PiB0aGlzLmFmdGVyT3BlcmF0aW9ucy5lbWl0KGV2ZW50KSk7XG4gICAgdGhpcy5fcmVnaXN0ZXIodGhpcy5pbnN0YW5jZSk7XG4gIH1cbn1cbiJdfQ==
|
"""
WSGI config for games_logger project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the application object.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'games_logger.settings')
# WSGI entry point used by servers such as gunicorn or mod_wsgi.
application = get_wsgi_application()
|
'''
@Params: directory of json files and directory of corresponding frames. Make sure that the json files have names like <number>.json,
for example: 897.json, and that the corresponding image file has a name like frame_897.jpg
TO RUN
command: python classify_data_by_angle.py -j <json_files_directory> -f <frames_directory>
OUTPUT: two directories: one for the up images and the other for the down images
'''
import json
import sys
import cv2
import numpy as np
import math
import os, sys, shutil
import argparse
class Point(object):
    """A single OpenPose keypoint: 2-D pixel position plus a confidence score."""

    def __init__(self, x, y, c):
        # x/y coordinates and the detection confidence reported by OpenPose.
        self.x = x
        self.y = y
        self.c = c

    def __repr__(self):
        parts = ('x: ' + str(self.x), ", y: " + str(self.y), ", c: " + str(self.c))
        return ''.join(parts)
# Set command line arguments
ap = argparse.ArgumentParser()
ap.add_argument('-j', '--json', required = True, help = "Input json files.")
ap.add_argument('-f', '--frame', required = True, help = "Input frame files.")
args = vars(ap.parse_args())

jsons_dir = args['json'] + '/'
frames_dir = args['frame'] + '/'
output_dir_up = './output_up/'
output_dir_down = './output_down/'


def _to_points(flat):
    """Convert a flat [x0, y0, c0, x1, y1, c1, ...] keypoint list to Points."""
    return [Point(flat[i], flat[i + 1], flat[i + 2]) for i in range(0, len(flat), 3)]


def _elbow_angle(shoulder, elbow, wrist):
    """Return the elbow angle in degrees, or None when it is undefined
    (degenerate geometry: shoulder==elbow or elbow==wrist collapses a side
    to zero length, which would divide by zero)."""
    upper = math.pow(shoulder.x - elbow.x, 2) + math.pow(shoulder.y - elbow.y, 2)
    lower = math.pow(elbow.x - wrist.x, 2) + math.pow(elbow.y - wrist.y, 2)
    span = math.pow(shoulder.x - wrist.x, 2) + math.pow(shoulder.y - wrist.y, 2)
    denom = -2 * math.sqrt(upper) * math.sqrt(lower)
    if denom == 0:
        return None
    # Law of cosines on the (squared) side lengths.
    return math.acos((span - upper - lower) / denom) * 180 / math.pi


# Clear the output directories if they exist, then recreate them.
if os.path.exists(output_dir_up):
    shutil.rmtree(output_dir_up, ignore_errors = True)
if os.path.exists(output_dir_down):
    shutil.rmtree(output_dir_down, ignore_errors = True)
os.mkdir(output_dir_up)
os.mkdir(output_dir_down)

for json_file in os.listdir(jsons_dir):
    # NOTE(review): assumes OpenPose-style file names whose third '_'-separated
    # field is the frame number; the module docstring claims plain
    # "<number>.json" -- confirm which format is actually produced.
    json_no = str(int(json_file.split('_')[2]))
    with open(jsons_dir + json_file) as f:
        data = json.loads(f.read())

    # Only the first detected person is classified.
    person = data['people'][0]
    pose_points = _to_points(person['pose_keypoints_2d'])
    face_points = _to_points(person['face_keypoints_2d'])
    lhand_points = _to_points(person['hand_left_keypoints_2d'])
    rhand_points = _to_points(person['hand_right_keypoints_2d'])

    # Left arm angle: shoulder (5), elbow (6), wrist (7).
    LQ = _elbow_angle(pose_points[5], pose_points[6], pose_points[7])
    if LQ is None:
        continue
    # Right arm angle: shoulder (2), elbow (3), wrist (4).  The original code
    # had no degenerate-geometry guard here and crashed with a division by
    # zero; skip the frame instead, mirroring the left-arm handling.
    RQ = _elbow_angle(pose_points[2], pose_points[3], pose_points[4])
    if RQ is None:
        continue

    # An all-zero keypoint means OpenPose did not detect that wrist.
    is_right_null = pose_points[4].x == 0 and pose_points[4].y == 0 and pose_points[4].c == 0
    is_left_null = pose_points[7].x == 0 and pose_points[7].y == 0 and pose_points[7].c == 0

    print(json_no)
    # DOWN: both arms missing, or every detected arm is bent beyond 100 deg.
    if (is_right_null and is_left_null) or (is_left_null and RQ > 100) or (is_right_null and LQ > 100) or (LQ > 100 and RQ > 100):
        shutil.copy(frames_dir + "frame_" + json_no + ".jpg", output_dir_down, follow_symlinks=True)
    # UP: otherwise.
    else:
        shutil.copy(frames_dir + "frame_" + json_no + ".jpg", output_dir_up, follow_symlinks=True)
# (Dropped the trailing "key = cv2.waitKey(0)": no window is ever created, so
# the call was dead code and its result was never used.)
|
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class SwitchMultiBoolOps(KaitaiStruct):
    # Parses the whole stream as a flat sequence of Opcode records.
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Consume opcodes until the underlying stream is exhausted.
        self.opcodes = []
        i = 0
        while not self._io.is_eof():
            self.opcodes.append(SwitchMultiBoolOps.Opcode(self._io, self, self._root))
            i += 1

    class Opcode(KaitaiStruct):
        # One record: a 1-byte code optionally followed by a little-endian body.
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.code = self._io.read_u1()
            # Generated switch selector: the guard admits codes 1..8 (the
            # "!= 10" test is vacuous in that range); only codes 1/2/4/8 match
            # a branch below, so other codes leave self.body unset.
            _on = (self.code if ((self.code > 0) and (self.code <= 8) and ((True if self.code != 10 else False))) else 0)
            if _on == 1:
                self.body = self._io.read_u1()
            elif _on == 2:
                self.body = self._io.read_u2le()
            elif _on == 4:
                self.body = self._io.read_u4le()
            elif _on == 8:
                self.body = self._io.read_u8le()
|
from ._abstract import AbstractScraper
class Cybercook(AbstractScraper):
    """Scraper for cybercook.com.br recipes.

    Every field is delegated to the schema.org metadata parser exposed by
    AbstractScraper as self.schema; no site-specific HTML parsing is needed.
    """

    @classmethod
    def host(cls):
        # Domain this scraper is registered to handle.
        return "cybercook.com.br"

    def title(self):
        return self.schema.title()

    def description(self):
        return self.schema.description()

    def total_time(self):
        return self.schema.total_time()

    def prep_time(self):
        return self.schema.prep_time()

    def cook_time(self):
        return self.schema.cook_time()

    def yields(self):
        return self.schema.yields()

    def image(self):
        return self.schema.image()

    def ingredients(self):
        return self.schema.ingredients()

    def instructions(self):
        return self.schema.instructions()

    def ratings(self):
        return self.schema.ratings()
|
import boto3
import logging
import datetime
import json
from datetime import datetime, timezone
from aws_session_management.aws_session_management import AwsSessionManagement
logger = logging.getLogger(__name__)
class KinesisDataStreamHandler(logging.StreamHandler):
    """Logging handler that serializes records as JSON and buffers them for
    delivery to an AWS Kinesis data stream on flush().

    When no stream name / session management is configured, records fall back
    to the underlying StreamHandler stream instead.
    """

    def __init__(self, kinesis_stream_name, subsystem, component, action, project_name, env, version, aws_session_management:AwsSessionManagement):
        # By default, logging.StreamHandler uses sys.stderr if stream parameter is not specified
        logging.StreamHandler.__init__(self)
        self.__aws_session_management = aws_session_management
        # Pending Kinesis records; drained by flush().
        self.__stream_buffer = []
        self.__kinesis_stream_name = kinesis_stream_name
        # Static metadata stamped onto every emitted record.
        self.__project_name = project_name
        self.__env = env
        self.__version = version
        self.__action = action
        self.__subsystem = subsystem
        self.__component = component

    def format(self, record):
        # Serialize the record plus the static metadata as one JSON document.
        data = {
            "src_timestamp": datetime.now(timezone.utc).isoformat(),
            "component": self.__component,
            "action": self.__action,
            "syslog_severity": record.levelname,
            "message": record.getMessage(),
            "subsystem": self.__subsystem,
            "project": self.__project_name,
            "environment": self.__env,
            "version": self.__version
        }
        # Propagate a request correlation id when the caller attached one.
        if hasattr(record, 'requestId'):
            data['requestId'] = str(record.requestId)
        return json.dumps(data)

    def emit(self, record):
        # Buffer for Kinesis when configured; otherwise write straight to the
        # fallback stream.
        try:
            msg = self.format(record)
            if self.__kinesis_stream_name and self.__aws_session_management:
                self.__stream_buffer.append({
                    'Data': msg.encode(encoding="UTF-8", errors="strict"),
                    # Partition by thread name so per-thread ordering is kept.
                    'PartitionKey': record.threadName
                })
            else:
                stream = self.stream
                stream.write(msg)
                stream.write(self.terminator)
                self.flush()
        except Exception as e:
            logger.error(f"Failed emitting record: {e}")
            self.handleError(record)

    def flush(self):
        # Hold the handler lock while draining the buffer to Kinesis so
        # concurrent emit()/flush() calls cannot interleave.
        self.acquire()
        try:
            if self.__kinesis_stream_name and self.__aws_session_management and self.__stream_buffer:
                kinesis_client = self.__aws_session_management.get_func_res()
                kinesis_client.put_records(StreamName=self.__kinesis_stream_name, Records=self.__stream_buffer)
                self.__stream_buffer.clear()
        except Exception as e:
            logger.error("An error occurred during flush operation.")
            logger.error(f"Exception: {e}")
            logger.error(f"Stream buffer: {self.__stream_buffer}")
            raise e
        finally:
            # Always flush the fallback stream and release the lock.
            if self.stream and hasattr(self.stream, "flush"):
                self.stream.flush()
            self.release()
|
from datetime import date
from typing import List, Optional
from pydantic import BaseModel, EmailStr, HttpUrl
class Model(BaseModel):
    """Base model that aliases every snake_case field name to camelCase."""

    class Config:
        @classmethod
        def alias_generator(cls, value: str) -> str:
            # "study_type" -> "studyType": keep the first chunk as-is and
            # capitalize each following chunk.
            first, *rest = value.split("_")
            return first + "".join(part.capitalize() for part in rest)
class DatedEntry(Model):
    """Represents an entry having a start and end date."""

    start_date: date
    # None means the entry is ongoing.
    end_date: Optional[date]
class NamedKeywords(Model):
    """Represents details describing a named list of keywords."""

    name: str
    keywords: Optional[List[str]]
# =============================================================================
# Personal Info
class Location(Model):
    """Represents a physical contact address."""

    address: str
    postal_code: Optional[str]
    region: str
    city: Optional[str]
    # ISO 3166-1 alpha-2 code; defaults to Nigeria.
    country_code: Optional[str] = "NG"
class Profile(Model):
    """Represents a profile on a social or professional network."""

    network: str
    username: str
    url: Optional[HttpUrl]
class PersonalInfo(Model):
    """Represents personal details for resume owner."""

    name: str
    # Short professional headline, e.g. "Software Engineer".
    label: str
    email: EmailStr
    location: Location
    phone: Optional[str]
    picture: Optional[str]
    summary: Optional[str]
    website: Optional[HttpUrl]
    profiles: List[Profile]
class Reference(Model):
    """Represents details describing reference received."""

    # Name of the person giving the reference.
    name: str
    # The reference text itself.
    reference: str
# =============================================================================
# Experience & Education
class Education(DatedEntry):
    """Represents details describing an educational qualification."""

    institution: str
    # Field of study, e.g. "Computer Science".
    area: str
    study_type: str = "Bachelor"
    gpa: Optional[str]
    courses: Optional[List[str]]
class Experience(DatedEntry):
    """Represents base model for experience related objects."""

    position: str
    summary: Optional[str]
    website: Optional[HttpUrl]
    highlights: Optional[List[str]]
class Work(Experience):
    """Represents details describing a work-related experience."""

    company: str
class Volunteer(Experience):
    """Represents details describing a volunteer-related experience."""

    organization: str
# =============================================================================
# Ability
class Language(Model):
    """Represents details describing language spoken."""

    language: str = "English"
    # e.g. "Native speaker", "Fluent".
    fluency: str
class Skill(NamedKeywords):
    """Represents details describing skill."""

    # Proficiency level, e.g. "Beginner" / "Advanced".
    level: str
# =============================================================================
# Achievements
class Award(Model):
    """Represents details describing a received award."""

    title: str
    awarder: str
    date: date
    summary: Optional[str]
class Publication(Model):
    """Represents details describing a publication."""

    name: str
    publisher: str
    release_date: date
    summary: Optional[str]
    website: Optional[HttpUrl]
# =============================================================================
# Rezume
class Rezume(Model):
    """Represents resume data (JSON Resume-style top-level document)."""

    basics: PersonalInfo
    work: Optional[List[Work]]
    volunteer: Optional[List[Volunteer]]
    education: List[Education]
    awards: Optional[List[Award]]
    publications: Optional[List[Publication]]
    skills: Optional[List[Skill]]
    languages: Optional[List[Language]]
    interests: Optional[List[NamedKeywords]]
    references: Optional[List[Reference]]
|
#!/usr/bin/env python
# Lint as: python3
# -*- encoding: utf-8 -*-
"""Tests for CSV output plugin."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import csv
import io
import os
import zipfile
from absl import app
import yaml
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.util.compat import csv as compat_csv
from grr_response_server.output_plugins import csv_plugin
from grr_response_server.output_plugins import test_plugins
from grr.test_lib import test_lib
class CSVInstantOutputPluginTest(test_plugins.InstantOutputPluginTestBase):
  """Tests instant CSV output plugin."""

  plugin_cls = csv_plugin.CSVInstantOutputPlugin

  def ProcessValuesToZip(self, values_by_cls):
    """Runs the plugin over the given values and opens the produced archive.

    Returns:
      A tuple (zipfile.ZipFile, archive basename without extension).
    """
    fd_path = self.ProcessValues(values_by_cls)
    file_basename, _ = os.path.splitext(os.path.basename(fd_path))
    return zipfile.ZipFile(fd_path), file_basename

  def testCSVPluginWithValuesOfSameType(self):
    """Exports 10 StatEntry values and checks every exported CSV column."""
    responses = []
    for i in range(10):
      responses.append(
          rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(
                  path="/foo/bar/%d" % i, pathtype="OS"),
              st_mode=33184,  # octal = 100640 => u=rw,g=r,o= => -rw-r-----
              st_ino=1063090,
              st_dev=64512,
              st_nlink=1 + i,
              st_uid=139592,
              st_gid=5000,
              st_size=0,
              st_atime=1336469177,
              st_mtime=1336129892,
              st_ctime=1336129892))

    zip_fd, prefix = self.ProcessValuesToZip(
        {rdf_client_fs.StatEntry: responses})
    # Archive contains exactly a manifest plus one CSV per exported type.
    self.assertEqual(
        set(zip_fd.namelist()),
        set([
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile/from_StatEntry.csv" % prefix
        ]))

    parsed_manifest = yaml.safe_load(zip_fd.read("%s/MANIFEST" % prefix))
    self.assertEqual(parsed_manifest,
                     {"export_stats": {
                         "StatEntry": {
                             "ExportedFile": 10
                         }
                     }})

    with zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix) as filedesc:
      content = filedesc.read().decode("utf-8")

    parsed_output = list(csv.DictReader(io.StringIO(content)))
    self.assertLen(parsed_output, 10)
    for i in range(10):
      # Make sure metadata is filled in.
      self.assertEqual(parsed_output[i]["metadata.client_urn"],
                       "aff4:/%s" % self.client_id)
      self.assertEqual(parsed_output[i]["metadata.hostname"],
                       "Host-0.example.com")
      self.assertEqual(parsed_output[i]["metadata.mac_address"],
                       "aabbccddee00\nbbccddeeff00")
      self.assertEqual(parsed_output[i]["metadata.source_urn"],
                       self.results_urn)
      self.assertEqual(parsed_output[i]["metadata.hardware_info.bios_version"],
                       "Bios-Version-0")
      self.assertEqual(parsed_output[i]["urn"],
                       "aff4:/%s/fs/os/foo/bar/%d" % (self.client_id, i))
      # st_mode is rendered symbolically, timestamps as human-readable UTC.
      self.assertEqual(parsed_output[i]["st_mode"], "-rw-r-----")
      self.assertEqual(parsed_output[i]["st_ino"], "1063090")
      self.assertEqual(parsed_output[i]["st_dev"], "64512")
      self.assertEqual(parsed_output[i]["st_nlink"], str(1 + i))
      self.assertEqual(parsed_output[i]["st_uid"], "139592")
      self.assertEqual(parsed_output[i]["st_gid"], "5000")
      self.assertEqual(parsed_output[i]["st_size"], "0")
      self.assertEqual(parsed_output[i]["st_atime"], "2012-05-08 09:26:17")
      self.assertEqual(parsed_output[i]["st_mtime"], "2012-05-04 11:11:32")
      self.assertEqual(parsed_output[i]["st_ctime"], "2012-05-04 11:11:32")
      self.assertEqual(parsed_output[i]["st_blksize"], "0")
      self.assertEqual(parsed_output[i]["st_rdev"], "0")
      self.assertEqual(parsed_output[i]["symlink"], "")

  def testCSVPluginWithValuesOfMultipleTypes(self):
    """Exports two different value types and expects one CSV per type."""
    zip_fd, prefix = self.ProcessValuesToZip({
        rdf_client_fs.StatEntry: [
            rdf_client_fs.StatEntry(
                pathspec=rdf_paths.PathSpec(path="/foo/bar", pathtype="OS"))
        ],
        rdf_client.Process: [rdf_client.Process(pid=42)]
    })
    self.assertEqual(
        set(zip_fd.namelist()),
        set([
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile/from_StatEntry.csv" % prefix,
            "%s/ExportedProcess/from_Process.csv" % prefix
        ]))

    parsed_manifest = yaml.safe_load(zip_fd.read("%s/MANIFEST" % prefix))
    self.assertEqual(
        parsed_manifest, {
            "export_stats": {
                "StatEntry": {
                    "ExportedFile": 1
                },
                "Process": {
                    "ExportedProcess": 1
                }
            }
        })

    with zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix) as filedesc:
      content = filedesc.read().decode("utf-8")

    parsed_output = list(csv.DictReader(io.StringIO(content)))
    self.assertLen(parsed_output, 1)

    # Make sure metadata is filled in.
    self.assertEqual(parsed_output[0]["metadata.client_urn"],
                     "aff4:/%s" % self.client_id)
    self.assertEqual(parsed_output[0]["metadata.hostname"],
                     "Host-0.example.com")
    self.assertEqual(parsed_output[0]["metadata.mac_address"],
                     "aabbccddee00\nbbccddeeff00")
    self.assertEqual(parsed_output[0]["metadata.source_urn"], self.results_urn)
    self.assertEqual(parsed_output[0]["urn"],
                     "aff4:/%s/fs/os/foo/bar" % self.client_id)

    filepath = "%s/ExportedProcess/from_Process.csv" % prefix
    with zip_fd.open(filepath) as filedesc:
      content = filedesc.read().decode("utf-8")

    parsed_output = list(csv.DictReader(io.StringIO(content)))
    self.assertLen(parsed_output, 1)

    self.assertEqual(parsed_output[0]["metadata.client_urn"],
                     "aff4:/%s" % self.client_id)
    self.assertEqual(parsed_output[0]["metadata.hostname"],
                     "Host-0.example.com")
    self.assertEqual(parsed_output[0]["metadata.mac_address"],
                     "aabbccddee00\nbbccddeeff00")
    self.assertEqual(parsed_output[0]["metadata.source_urn"], self.results_urn)
    self.assertEqual(parsed_output[0]["pid"], "42")

  def testCSVPluginWritesUnicodeValuesCorrectly(self):
    """Non-ASCII path components must round-trip through the CSV export."""
    zip_fd, prefix = self.ProcessValuesToZip({
        rdf_client_fs.StatEntry: [
            rdf_client_fs.StatEntry(
                pathspec=rdf_paths.PathSpec(path="/中国新闻网新闻中", pathtype="OS"))
        ]
    })
    self.assertEqual(
        set(zip_fd.namelist()),
        set([
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile/from_StatEntry.csv" % prefix
        ]))

    data = zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix).read()
    parsed_output = list(compat_csv.Reader(data.decode("utf-8")))
    # Row 0 is the header, row 1 is the single exported value.
    self.assertLen(parsed_output, 2)

    urn_pos = parsed_output[0].index("urn")
    urn = parsed_output[1][urn_pos]
    self.assertEqual(urn, "aff4:/C.1000000000000000/fs/os/中国新闻网新闻中")

  def testCSVPluginWritesBytesValuesCorrectly(self):
    """Raw bytes must be exported as escaped text, not decoded."""
    pathspec = rdf_paths.PathSpec.OS(path="/żółta/gęśla/jaźń")
    values = {
        rdf_client.BufferReference: [
            rdf_client.BufferReference(data=b"\xff\x00\xff", pathspec=pathspec),
            rdf_client.BufferReference(data=b"\xfa\xfb\xfc", pathspec=pathspec),
        ],
    }

    zip_fd, prefix = self.ProcessValuesToZip(values)
    manifest_path = "{}/MANIFEST".format(prefix)
    data_path = "{}/ExportedMatch/from_BufferReference.csv".format(prefix)
    self.assertCountEqual(zip_fd.namelist(), [manifest_path, data_path])

    with zip_fd.open(data_path) as data:
      results = list(compat_csv.Reader(data.read().decode("utf-8")))

    # Header plus two data rows.
    self.assertLen(results, 3)
    data_idx = results[0].index("data")
    self.assertEqual(results[1][data_idx], "\\xff\\x00\\xff")
    self.assertEqual(results[2][data_idx], "\\xfa\\xfb\\xfc")

  def testCSVPluginWritesMoreThanOneBatchOfRowsCorrectly(self):
    """Row batching must not drop or reorder rows across batch boundaries."""
    num_rows = csv_plugin.CSVInstantOutputPlugin.ROW_BATCH * 2 + 1

    responses = []
    for i in range(num_rows):
      responses.append(
          rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(
                  path="/foo/bar/%d" % i, pathtype="OS")))

    zip_fd, prefix = self.ProcessValuesToZip(
        {rdf_client_fs.StatEntry: responses})
    with zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix) as filedesc:
      content = filedesc.read().decode("utf-8")

    parsed_output = list(csv.DictReader(io.StringIO(content)))
    self.assertLen(parsed_output, num_rows)
    for i in range(num_rows):
      self.assertEqual(parsed_output[i]["urn"],
                       "aff4:/%s/fs/os/foo/bar/%d" % (self.client_id, i))
def main(argv):
  # Delegate to GRR's standard test entry point.
  test_lib.main(argv)


if __name__ == "__main__":
  app.run(main)
|
from unittest import TestCase
from grab import Grab
from grab.error import GrabTooManyRedirectsError
from .tornado_util import SERVER
from .util import GRAB_TRANSPORT, only_transport
class RedirectController(object):
    """Serves HTTP 301 redirects back to the base URL until a countdown expires."""

    def __init__(self, counter):
        self.setup_counter(counter)

    def setup_counter(self, counter):
        # (Re)arm the countdown of redirect responses still to serve.
        self.counter = counter

    def request_handler(self, server):
        # Positive countdown -> redirect back to the test server root;
        # exhausted countdown -> finish the chain with a plain 200.
        if not self.counter:
            server.set_status(200)
        else:
            server.set_status(301)
            server.set_header('Location', SERVER.BASE_URL)
        self.counter -= 1
class RefreshRedirectController(RedirectController):
    """Variant that redirects via an HTML meta-refresh tag instead of a 301."""

    def request_handler(self, server):
        # Always respond 200: while the countdown is positive the body carries
        # a meta-refresh tag, afterwards a plain OK body ends the chain.
        server.set_status(200)
        if not self.counter:
            server.write('OK')
        else:
            server.write('<html><head><meta http-equiv="refresh" content="5"></head>')
        self.counter -= 1
class GrabRedirectTestCase(TestCase):
    """Integration tests for Grab's redirect handling against the test SERVER."""

    def setUp(self):
        # Clear any response/request state left over from a previous test.
        SERVER.reset()

    def test_meta_refresh_redirect(self):
        # By default meta-redirect is off
        meta_url = SERVER.BASE_URL + '/foo'
        SERVER.RESPONSE_ONCE['get'] = '<meta http-equiv="refresh" content="5; url=%s">' % meta_url
        g = Grab(transport=GRAB_TRANSPORT)
        g.go(SERVER.BASE_URL + '/')
        # The refresh target must NOT have been followed.
        self.assertEqual(SERVER.REQUEST['path'], '/')
        self.assertEqual(g.response.url, SERVER.BASE_URL + '/')

        # Now test meta-auto-redirect
        SERVER.RESPONSE_ONCE['get'] = '<meta http-equiv="refresh" content="5; url=%s">' % meta_url
        g = Grab(transport=GRAB_TRANSPORT)
        g.setup(follow_refresh=True)
        g.go(SERVER.BASE_URL)
        # With follow_refresh on, the final request must be the refresh target.
        self.assertEqual(SERVER.REQUEST['path'], '/foo')
        self.assertEqual(g.response.url, meta_url)

    @only_transport('grab.transport.curl.CurlTransport')
    def test_redirect_limit(self):
        # 10 HTTP redirects against a limit of 5 must raise; a limit of 20
        # lets the chain complete.
        ctl = RedirectController(10)
        SERVER.RESPONSE['get_callback'] = ctl.request_handler
        g = Grab(transport=GRAB_TRANSPORT)
        g.setup(redirect_limit=5)
        self.assertRaises(GrabTooManyRedirectsError,
                          lambda: g.go(SERVER.BASE_URL))
        ctl.setup_counter(10)
        g.setup(redirect_limit=20)
        g.go(SERVER.BASE_URL)

    @only_transport('grab.transport.curl.CurlTransport')
    def test_refresh_redirect_limit(self):
        # Meta-refresh redirects only count against the limit when
        # follow_refresh is enabled.
        ctl = RefreshRedirectController(10)
        SERVER.RESPONSE['get_callback'] = ctl.request_handler
        g = Grab(transport=GRAB_TRANSPORT)
        g.setup(redirect_limit=5, follow_refresh=False)
        g.go(SERVER.BASE_URL)
        ctl.setup_counter(10)
        g.setup(redirect_limit=5, follow_refresh=True)
        self.assertRaises(GrabTooManyRedirectsError,
                          lambda: g.go(SERVER.BASE_URL))
|
const postController = require('./post')
const commentController = require('./comment')
const userController = require('./user')
const homeController = require('./home')
const threadController = require('./thread')
const messageController = require('./message')
const graphController = require('./graph')
module.exports = {
post: postController,
comment: commentController,
user: userController,
home: homeController,
thread: threadController,
message: messageController,
graph: graphController
}
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    """LeetCode 1325: delete leaves with a given value, bottom-up."""

    # TreeNode is only declared in the judge environment (see the comment
    # above), so the annotations use string forward references; the original
    # bare names would raise NameError at import time outside that environment.
    def removeLeafNodes(self, root: "TreeNode", target: int) -> "TreeNode":
        """Removes every leaf whose value equals ``target``, repeatedly.

        Children are pruned first, so a node that becomes a leaf after its
        children are removed is itself considered for removal.

        Args:
            root: Root of the binary tree (may be None).
            target: Leaf value to prune.

        Returns:
            The (possibly None) root of the pruned tree.
        """
        if not root:
            return None
        # Post-order: prune subtrees before examining this node.
        root.left = self.removeLeafNodes(root.left, target)
        root.right = self.removeLeafNodes(root.right, target)
        if root.val == target and not root.left and not root.right:
            root = None
        return root
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
'use strict';
var assert = require('chai').assert;
var sinon = require('sinon');
var EventEmitter = require('events').EventEmitter;
var Amqp = require('../dist/amqp.js').Amqp;
var Client = require('../dist/client.js').Client;
var Message = require('azure-iot-common').Message;
var errors = require('azure-iot-common').errors;
var SimulatedAmqp = require('./amqp_simulated.js');
var transportSpecificTests = require('./_client_common_testrun.js');
describe('Client', function () {
describe('#constructor', function () {
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_001: [The Client constructor shall throw ReferenceError if the transport argument is falsy.]*/
it('throws when transport is falsy', function () {
assert.throws(function () {
return new Client();
}, ReferenceError, 'transport is \'undefined\'');
});
});
describe('#fromConnectionString', function () {
var connStr = 'HostName=a.b.c;SharedAccessKeyName=name;SharedAccessKey=key';
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_002: [The fromConnectionString method shall throw ReferenceError if the connStr argument is falsy.]*/
it('throws when value is falsy', function () {
assert.throws(function () {
return Client.fromConnectionString();
}, ReferenceError, 'connStr is \'undefined\'');
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_017: [The `fromConnectionString` method shall use the default Transport (Amqp) if the `Transport` optional argument is falsy.]*/
it('creates an instance of the default transport', function () {
var client = Client.fromConnectionString(connStr);
assert.instanceOf(client._transport, Amqp);
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_016: [The `fromConnectionString` method shall use the `Transport` constructor passed as argument to instantiate a transport object if it's not falsy.]*/
it('uses the transport given as argument', function() {
var FakeTransport = function (config) {
assert.isOk(config);
};
var client = Client.fromConnectionString(connStr, FakeTransport);
assert.instanceOf(client._transport, FakeTransport);
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_004: [The fromConnectionString method shall return a new instance of the Client object, as by a call to new Client(transport).]*/
it('returns an instance of Client', function () {
var client = Client.fromConnectionString(connStr);
assert.instanceOf(client, Client);
assert.isOk(client._restApiClient);
});
});
describe('#fromSharedAccessSignature', function () {
var token = 'SharedAccessSignature sr=hubName.azure-devices.net&sig=signature&skn=keyname&se=expiry';
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_005: [The fromSharedAccessSignature method shall throw ReferenceError if the sharedAccessSignature argument is falsy.]*/
it('throws when value is falsy', function () {
assert.throws(function () {
return Client.fromSharedAccessSignature();
}, ReferenceError, 'sharedAccessSignature is \'undefined\'');
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_018: [The `fromSharedAccessSignature` method shall create a new transport instance and pass it a config object formed from the connection string given as argument.]*/
it('correctly populates the config structure', function() {
var client = Client.fromSharedAccessSignature(token);
assert.equal(client._transport._config.host, 'hubName.azure-devices.net');
assert.equal(client._transport._config.keyName, 'keyname');
assert.equal(client._transport._config.sharedAccessSignature, token);
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_020: [The `fromSharedAccessSignature` method shall use the default Transport (Amqp) if the `Transport` optional argument is falsy.]*/
it('creates an instance of the default transport', function () {
var client = Client.fromSharedAccessSignature(token);
assert.instanceOf(client._transport, Amqp);
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_019: [The `fromSharedAccessSignature` method shall use the `Transport` constructor passed as argument to instantiate a transport object if it's not falsy.]*/
it('uses the transport given as argument', function() {
var FakeTransport = function (config) {
assert.isOk(config);
};
var client = Client.fromSharedAccessSignature(token, FakeTransport);
assert.instanceOf(client._transport, FakeTransport);
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_007: [The fromSharedAccessSignature method shall return a new instance of the Client object, as by a call to new Client(transport).]*/
it('returns an instance of Client', function () {
var client = Client.fromSharedAccessSignature(token);
assert.instanceOf(client, Client);
assert.isOk(client._restApiClient);
});
});
describe('#fromTokenCredential', function () {
var fakeTokenCredential = {
getToken: sinon.stub().resolves({
token: "fake_token",
expiresOnTimeStamp: Date.now() + 3600000
})
};
it('creates an instance of the default transport', function() {
var client = Client.fromTokenCredential("hub.host.tv", fakeTokenCredential);
assert.instanceOf(client._transport, Amqp);
});
it('uses the transport given as argument', function () {
var FakeTransport = function (config) {
assert.isOk(config);
};
var client = Client.fromTokenCredential("hub.host.tv", fakeTokenCredential, FakeTransport);
assert.instanceOf(client._transport, FakeTransport);
});
it('returns an instance of Client', function () {
var client = Client.fromTokenCredential("hub.host.tv", fakeTokenCredential);
assert.instanceOf(client, Client);
assert.isOk(client._restApiClient);
});
it('correctly populates the config structure', function() {
var client = Client.fromTokenCredential("hub.host.tv", fakeTokenCredential);
assert.equal(client._transport._config.host, 'hub.host.tv');
assert.equal(client._transport._config.tokenCredential, fakeTokenCredential);
assert.equal(client._restApiClient._config.host, 'hub.host.tv');
assert.equal(client._restApiClient._config.tokenCredential, fakeTokenCredential);
assert.equal(client._restApiClient._config.tokenScope, 'https://iothubs.azure.net/.default');
});
});
var goodSendParameters = [
{ obj: Buffer.from('foo'), name: 'Buffer' },
{ obj: 'foo', name: 'string' },
{ obj: [], name: 'Array' },
{ obj: new ArrayBuffer(), name: 'ArrayBuffer' }
];
var badSendParameters = [
{ obj: 1, name: 'number' },
{ obj: true, name: 'boolean' },
{ obj: {}, name: 'object' }
];
describe('#send', function () {
var testSubject;
beforeEach('prepare test subject', function () {
testSubject = new Client({}, {});
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_013: [The send method shall throw ReferenceError if the deviceId or message arguments are falsy.]*/
it('throws if deviceId is falsy', function () {
assert.throws(function () {
testSubject.send(undefined, {}, () => { });
}, ReferenceError, 'deviceId is \'undefined\'');
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_013: [The send method shall throw ReferenceError if the deviceId or message arguments are falsy.]*/
it('throws if message is falsy', function () {
assert.throws(function () {
testSubject.send('id', undefined, () => { });
}, ReferenceError, 'message is \'undefined\'');
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_030: [The `send` method shall not throw if the `done` callback is falsy.]*/
it('returns a Promise done is falsy', function () {
var simulatedAmqp = new SimulatedAmqp();
var client = new Client(simulatedAmqp);
const promise = client.send('id', new Message('msg'));
assert.instanceOf(promise, Promise);
promise.catch(console.log);
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_18_016: [The `send` method shall throw an `ArgumentError` if the `message` argument is not of type `azure-iot-common.Message` or `azure-iot-common.Message.BufferConvertible`.]*/
badSendParameters.forEach(function(testConfig) {
it('throws if message is of type ' + testConfig.name, function() {
assert.throws(function () {
testSubject.send('id', testConfig.obj, () => { });
}, errors.ArgumentError);
});
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_014: [The `send` method shall convert the `message` object to type `azure-iot-common.Message` if it is not already of type `azure-iot-common.Message`.]*/
goodSendParameters.forEach(function(testConfig) {
it('Converts to message if message is of type ' + testConfig.name, function(testCallback) {
var simulatedAmqp = new SimulatedAmqp();
var client = new Client(simulatedAmqp);
sinon.spy(simulatedAmqp, 'send');
client.send('id', testConfig.obj, function(err, state) {
assert(!err);
assert.equal(state.constructor.name, "MessageEnqueued");
var sentMessage = simulatedAmqp.send.firstCall.args[1];
assert.deepEqual(sentMessage, new Message(testConfig.obj));
testCallback();
});
});
});
});
describe('#invokeDeviceMethod', function() {
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_014: [The `invokeDeviceMethod` method shall throw a `ReferenceError` if `deviceId` is `null`, `undefined` or an empty string.]*/
[undefined, null, ''].forEach(function(badDeviceId) {
it('throws if \'deviceId\' is \'' + badDeviceId + '\'', function() {
var client = new Client({}, {});
assert.throws(function() {
client.invokeDeviceMethod(badDeviceId, 'method', { foo: 'bar' }, 42, function() {});
}, ReferenceError);
});
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_006: [The `invokeDeviceMethod` method shall throw a `ReferenceError` if `methodName` is `null`, `undefined` or an empty string.]*/
[undefined, null, ''].forEach(function(badMethodName) {
it('throws if \'methodParams.methodName\' is \'' + badMethodName + '\'', function() {
var client = new Client({}, {});
assert.throws(function() {
client.invokeDeviceMethod('deviceId', { methodName: badMethodName, payload: { foo: 'bar' }, timeoutInSeconds: 42 }, function() {});
}, ReferenceError);
});
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_007: [The `invokeDeviceMethod` method shall throw a `TypeError` if `methodName` is not a `string`.]*/
[{}, function(){}, 42].forEach(function(badMethodType) {
it('throws if \'methodParams.methodName\' is of type \'' + badMethodType + '\'', function() {
var client = new Client({}, {});
assert.throws(function() {
client.invokeDeviceMethod('deviceId', { methodName: badMethodType, payload: { foo: 'bar' }, timeoutInSeconds: 42 }, function() {});
}, TypeError);
});
});
});
describe('invokeDeviceMethod as promise', function() {
it ('Can fulfill a promise when using moduleId and methodParams', function(testCallback) {
var fakeMethodParams = {
methodName: 'method',
payload: null,
timeoutInSeconds: 42
};
var fakeResult = { foo: 'bar' };
var fakeResponse = { statusCode: 200 };
var fakeRestClient = {
executeApiCall: function(method, path, headers, body, timeout, callback) {
callback(null, fakeResult, fakeResponse);
}
};
var client = new Client({}, fakeRestClient);
client.invokeDeviceMethod('fakeDeviceId', 'fakeModuleId', fakeMethodParams).then((promiseResult) => {
assert.strictEqual(promiseResult.result, fakeResult);
assert.strictEqual(promiseResult.message, fakeResponse);
testCallback();
})
.catch((err) => {
assert.fail('promise incorrectly rejected');
});
});
it ('Can reject a promise when using using moduleId and methodParams with promise rejected', function(testCallback) {
var fakeMethodParams = {
methodName: 'method',
payload: null,
timeoutInSeconds: 42
};
var fakeError = new Error('good error');
var fakeResult = { foo: 'bar' };
var fakeResponse = { statusCode: 200 };
var fakeRestClient = {
executeApiCall: function(method, path, headers, body, timeout, callback) {
callback(fakeError);
}
};
var client = new Client({}, fakeRestClient);
client.invokeDeviceMethod('fakeDeviceId', 'fakeModuleId', fakeMethodParams).then((promiseResult) => {
assert.fail('promise incorrectly fulfilled');
})
.catch((err) => {
assert.strictEqual(err, fakeError);
testCallback();
});
});
it ('Can fulfill the promise when only passing a methodParams argument', function(testCallback) {
var fakeMethodParams = {
methodName: 'method',
payload: null,
timeoutInSeconds: 42
};
var fakeResult = { foo: 'bar' };
var fakeResponse = { statusCode: 200 };
var fakeRestClient = {
executeApiCall: function(method, path, headers, body, timeout, callback) {
callback(null, fakeResult, fakeResponse);
}
};
var client = new Client({}, fakeRestClient);
client.invokeDeviceMethod('fakeDeviceId', fakeMethodParams).then((promiseResult) => {
assert.strictEqual(promiseResult.result, fakeResult);
assert.strictEqual(promiseResult.message, fakeResponse);
testCallback();
})
.catch((err) => {
assert.fail('promise incorrectly rejected');
});
});
it ('Can reject the promise when only passing a methodParams argument', function(testCallback) {
var fakeMethodParams = {
methodName: 'method',
payload: null,
timeoutInSeconds: 42
};
var fakeError = new Error('good error');
var fakeResult = { foo: 'bar' };
var fakeResponse = { statusCode: 200 };
var fakeRestClient = {
executeApiCall: function(method, path, headers, body, timeout, callback) {
callback(fakeError);
}
};
var client = new Client({}, fakeRestClient);
client.invokeDeviceMethod('fakeDeviceId', fakeMethodParams).then((promiseResult) => {
assert.fail('promise incorrectly fulfilled');
})
.catch((err) => {
assert.strictEqual(err, fakeError);
testCallback();
});
});
[undefined, null, '', {}, 42].forEach(function(badMethod) {
it ('throws ReferenceError when using moduleId and methodParams is \'' + badMethod + '\'', function(testCallback) {
var client = new Client({}, {});
client.invokeDeviceMethod('fakeDeviceId', 'fakeModuleId', badMethod).then((promiseResult) => {
assert.fail('promise incorrectly fulfilled');
}).catch((err) => {
assert.instanceOf(err, ReferenceError);
testCallback();
});
});
});
[{methodName: 4}].forEach(function(badMethodType) {
it ('throws TypeError when using moduleId and methodParams has type of \'' + badMethodType + '\'', function(testCallback) {
var client = new Client({}, {});
client.invokeDeviceMethod('fakeDeviceId', 'fakeModuleId', badMethodType).then((promiseResult) => {
assert.fail('promise incorrectly fulfilled');
}).catch((err) => {
assert.instanceOf(err, TypeError);
testCallback();
});
});
});
[undefined, null, '', {}, 42].forEach(function(badMethod) {
it ('throws ReferenceError when NOT using moduleId and methodParams has type of \'' + badMethod + '\'', function(testCallback) {
var client = new Client({}, {});
client.invokeDeviceMethod('fakeDeviceId', badMethod).then((promiseResult) => {
assert.fail('promise incorrectly fulfilled');
}).catch((err) => {
assert.instanceOf(err, ReferenceError);
testCallback();
});
});
});
[{methodName: 4}].forEach(function(badMethodType) {
it ('throws TypeError when NOT using moduleId and methodParams has type of \'' + badMethodType + '\'', function(testCallback) {
var client = new Client({}, {});
client.invokeDeviceMethod('fakeDeviceId', badMethodType).then((promiseResult) => {
assert.fail('promise incorrectly fulfilled');
}).catch((err) => {
assert.instanceOf(err, TypeError);
testCallback();
});
});
});
});
[
{ functionUnderTest: function(client, param, callback) { client.invokeDeviceMethod('deviceId', param, callback); } },
{ functionUnderTest: function(client, param, callback) { client.invokeDeviceMethod('deviceId', 'moduleId', param, callback); } },
].forEach(function(testConfig) {
describe('#invokeDeviceMethod', function() {
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_009: [The `invokeDeviceMethod` method shall initialize a new instance of `DeviceMethod` with the `methodName` and `timeout` values passed in the arguments.]*/
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_010: [The `invokeDeviceMethod` method shall use the newly created instance of `DeviceMethod` to invoke the method with the `payload` argument on the device specified with the `deviceid` argument .]*/
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_013: [The `invokeDeviceMethod` method shall call the `done` callback with a `null` first argument, the result of the method execution in the second argument, and the transport-specific response object as a third argument.]*/
/*Tests_SRS_NODE_IOTHUB_CLIENT_18_003: [If `moduleIdOrMethodParams` is a string the `invokeDeviceMethod` method shall call `invokeOnModule` on the new `DeviceMethod` instance. ]*/
it('uses the DeviceMethod client to invoke the method', function(testCallback) {
var fakeMethodParams = {
methodName: 'method',
payload: null,
timeoutInSeconds: 42
};
var fakeResult = { foo: 'bar' };
var fakeResponse = { statusCode: 200 };
var fakeRestClient = {
executeApiCall: function(method, path, headers, body, timeout, callback) {
callback(null, fakeResult, fakeResponse);
}
};
var client = new Client({}, fakeRestClient);
testConfig.functionUnderTest(client, fakeMethodParams, function(err, result, response) {
assert.isNull(err);
assert.equal(result, fakeResult);
assert.equal(response, fakeResponse);
testCallback();
});
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_012: [The `invokeDeviceMethod` method shall call the `done` callback with a standard javascript `Error` object if the request failed.]*/
it('works when payload and timeout are omitted', function(testCallback) {
var fakeError = new Error('fake error');
var fakeRestClientFails = {
executeApiCall: function(method, path, headers, body, timeout, callback) {
callback(fakeError);
}
};
var client = new Client({}, fakeRestClientFails);
testConfig.functionUnderTest(client, { methodName: 'method' }, function(err) {
assert.equal(err, fakeError);
testCallback();
});
});
});
});
describe('#open', function () {
  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_004: [The `disconnect` event shall be emitted when the client is disconnected from the server.]*/
  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_002: [If the transport successfully establishes a connection the `open` method shall subscribe to the `disconnect` event of the transport.]*/
  it("subscribes to the 'disconnect' event once connected", function (done) {
    const simulatedAmqp = new SimulatedAmqp();
    const client = new Client(simulatedAmqp, {});
    client.open(function () {
      // After a successful open, a transport-level disconnect must surface
      // as a 'disconnect' event on the client itself.
      client.on('disconnect', function () {
        done();
      });
      simulatedAmqp.emit('disconnect');
    });
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_05_009: [**When the `open` method completes, the callback function (indicated by the `done` argument) shall be invoked with the following arguments:
  - `err` - standard JavaScript `Error` object (or subclass)]*/
  it('calls the done callback if passed as argument', function (testCallback) {
    new Client(new SimulatedAmqp()).open(testCallback);
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_006: [The `open` method should not throw if the `done` callback is not specified.]*/
  it("doesn't throw if the done callback is not passed as argument", function () {
    const client = new Client(new SimulatedAmqp());
    assert.doesNotThrow(function () {
      client.open();
    });
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_05_011: [**Otherwise the argument `err` shall have an `amqpError` property containing implementation-specific response information for use in logging and troubleshooting.]*/
  it('calls the done callback with an error if the transport fails to connect', function (testCallback) {
    const connectError = new errors.UnauthorizedError('will not retry');
    const failingTransport = new EventEmitter();
    failingTransport.connect = sinon.stub().callsArgWith(0, connectError);
    new Client(failingTransport).open(function (err) {
      assert.strictEqual(err, connectError);
      testCallback();
    });
  });
});
describe('#close', function () {
  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_003: [The `close` method shall remove the listener that has been attached to the transport `disconnect` event.]*/
  it("unsubscribes for the 'disconnect' event when disconnecting", function (done) {
    const simulatedAmqp = new SimulatedAmqp();
    const client = new Client(simulatedAmqp, {});
    let disconnectReceived = false;
    client.open(function () {
      client.on('disconnect', function () {
        disconnectReceived = true;
      });
      client.close(function () {
        // Once closed, transport disconnects must no longer reach the client.
        simulatedAmqp.emit('disconnect');
        assert.isFalse(disconnectReceived);
        done();
      });
    });
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_05_022: [When the `close` method completes, the callback function (indicated by the done argument) shall be invoked with the following arguments:
  - `err` - standard JavaScript `Error` object (or subclass)]*/
  it('calls the done callback if passed as argument', function (testCallback) {
    new Client(new SimulatedAmqp()).close(testCallback);
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_005: [The `close` method should not throw if the `done` callback is not specified.]*/
  it("doesn't throw if the done callback is not passed as argument", function () {
    const client = new Client(new SimulatedAmqp());
    assert.doesNotThrow(function () {
      client.close();
    });
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_05_024: [Otherwise the argument `err` shall have a transport property containing implementation-specific response information for use in logging and troubleshooting.]*/
  it('calls the done callback with an error if the transport fails to disconnect', function (testCallback) {
    const disconnectError = new errors.UnauthorizedError('will not retry');
    const failingTransport = new EventEmitter();
    failingTransport.disconnect = sinon.stub().callsArgWith(0, disconnectError);
    new Client(failingTransport).close(function (err) {
      assert.strictEqual(err, disconnectError);
      testCallback();
    });
  });
});
/*Tests_SRS_NODE_IOTHUB_CLIENT_05_027: [When the `getFeedbackReceiver` method completes, the callback function (indicated by the `done` argument) shall be invoked with the following arguments:
- `err` - standard JavaScript `Error` object (or subclass): `null` if the operation was successful
- `receiver` - an `AmqpReceiver` instance: `undefined` if the operation failed.]*/
/*Tests_SRS_NODE_IOTHUB_CLIENT_16_001: [When the `getFileNotificationReceiver` method completes, the callback function (indicated by the `done` argument) shall be invoked with the following arguments:
- `err` - standard JavaScript `Error` object (or subclass): `null` if the operation was successful
- `receiver` - an `AmqpReceiver` instance: `undefined` if the operation failed.]*/
// Both receiver getters share the same contract, so the suite is generated
// once per method name.
['getFeedbackReceiver', 'getFileNotificationReceiver'].forEach(function (getReceiverMethod) {
  describe(getReceiverMethod, function () {
    it('calls ' + getReceiverMethod + ' on the transport', function (testCallback) {
      const fakeTransport = new EventEmitter();
      fakeTransport[getReceiverMethod] = sinon.stub().callsArgWith(0, null, new EventEmitter());
      new Client(fakeTransport)[getReceiverMethod](function (err, recv) {
        assert.isNull(err);
        assert.instanceOf(recv, EventEmitter);
        testCallback();
      });
    });

    it('calls its callback with an error if it the transport fails to provide a feedback receiver', function (testCallback) {
      const receiverError = new errors.UnauthorizedError('will not retry');
      const fakeTransport = new EventEmitter();
      fakeTransport[getReceiverMethod] = sinon.stub().callsArgWith(0, receiverError);
      new Client(fakeTransport)[getReceiverMethod](function (err) {
        assert.strictEqual(err, receiverError);
        testCallback();
      });
    });
  });
});
describe('setRetryPolicy', function () {
  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_027: [The `setRetryPolicy` method shall throw a `ReferenceError` if the `policy` argument is falsy.]*/
  [null, undefined].forEach(function (badPolicy) {
    it('throws a ReferenceError if the policy is \'' + badPolicy + '\'', function () {
      var client = new Client(new EventEmitter());
      assert.throws(function () {
        client.setRetryPolicy(badPolicy);
      }, ReferenceError);
    });
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_028: [The `setRetryPolicy` method shall throw an `ArgumentError` if the `policy` object does not have a `shouldRetry` method and a `nextRetryTimeout` method.]*/
  it('throws an ArgumentError if the policy does not have a shouldRetry method', function () {
    var badPolicy = { nextRetryTimeout: function () {} };
    var client = new Client(new EventEmitter());
    assert.throws(function () {
      client.setRetryPolicy(badPolicy);
    }, errors.ArgumentError);
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_028: [The `setRetryPolicy` method shall throw an `ArgumentError` if the `policy` object does not have a `shouldRetry` method and a `nextRetryTimeout` method.]*/
  it('throws an ArgumentError if the policy does not have a nextRetryTimeout method', function () {
    var badPolicy = { shouldRetry: function () {} };
    var client = new Client(new EventEmitter());
    assert.throws(function () {
      client.setRetryPolicy(badPolicy);
    }, errors.ArgumentError);
  });

  /*Tests_SRS_NODE_IOTHUB_CLIENT_16_029: [Any operation (e.g. `send`, `getFeedbackReceiver`, etc) initiated after a call to `setRetryPolicy` shall use the policy passed as argument to retry.]*/
  it('uses the new retry policy for all subsequent calls', function (testCallback) {
    var fakeError = new errors.UnauthorizedError('will not retry');
    var fakeRetryPolicy = {
      shouldRetry: sinon.stub().returns(true),
      nextRetryTimeout: sinon.stub().returns(1)
    };
    var fakeTransport = new EventEmitter();
    // The first connect attempt fails so the retry machinery engages; by the
    // time the second attempt runs, the custom policy must have been
    // consulted for both the retry decision and the retry delay.
    fakeTransport.connect = sinon.stub().onFirstCall().callsArgWith(0, fakeError)
      .onSecondCall().callsFake(function () {
        assert.isTrue(fakeRetryPolicy.shouldRetry.calledOnce);
        // BUGFIX: this previously re-asserted shouldRetry.calledOnce (copy-paste);
        // the second check must cover nextRetryTimeout.
        assert.isTrue(fakeRetryPolicy.nextRetryTimeout.calledOnce);
        testCallback();
      });
    var client = new Client(fakeTransport);
    client.setRetryPolicy(fakeRetryPolicy);
    client.open(function () {});
  });
});
});
// Registry stub for the simulated-AMQP suite: every operation completes
// successfully and immediately, with no result payload.
const fakeRegistry = {
  create(device, done) { done(); },
  addModule(module, done) { done(); },
  delete(deviceId, done) { done(); }
};
describe('Over simulated AMQP', function () {
  // Run the shared transport test-suite against a fresh simulated AMQP
  // transport, the environment-provided connection string and the stub registry.
  transportSpecificTests({
    transport: function () { return new SimulatedAmqp(); },
    connectionString: process.env.IOTHUB_CONNECTION_STRING,
    registry: fakeRegistry
  });
});
|
/** @license zlib.js 2012 - imaya [ https://github.com/imaya/zlib.js ] The MIT License */
(function() {'use strict';function i(a){throw a;}var r=void 0,v=!0,aa=this;function y(a,c){var b=a.split("."),e=aa;!(b[0]in e)&&e.execScript&&e.execScript("var "+b[0]);for(var f;b.length&&(f=b.shift());)!b.length&&c!==r?e[f]=c:e=e[f]?e[f]:e[f]={}};var H="undefined"!==typeof Uint8Array&&"undefined"!==typeof Uint16Array&&"undefined"!==typeof Uint32Array;function ba(a){if("string"===typeof a){var c=a.split(""),b,e;b=0;for(e=c.length;b<e;b++)c[b]=(c[b].charCodeAt(0)&255)>>>0;a=c}for(var f=1,d=0,g=a.length,h,m=0;0<g;){h=1024<g?1024:g;g-=h;do f+=a[m++],d+=f;while(--h);f%=65521;d%=65521}return(d<<16|f)>>>0};function J(a,c){this.index="number"===typeof c?c:0;this.i=0;this.buffer=a instanceof(H?Uint8Array:Array)?a:new (H?Uint8Array:Array)(32768);2*this.buffer.length<=this.index&&i(Error("invalid index"));this.buffer.length<=this.index&&this.f()}J.prototype.f=function(){var a=this.buffer,c,b=a.length,e=new (H?Uint8Array:Array)(b<<1);if(H)e.set(a);else for(c=0;c<b;++c)e[c]=a[c];return this.buffer=e};
J.prototype.d=function(a,c,b){var e=this.buffer,f=this.index,d=this.i,g=e[f],h;b&&1<c&&(a=8<c?(N[a&255]<<24|N[a>>>8&255]<<16|N[a>>>16&255]<<8|N[a>>>24&255])>>32-c:N[a]>>8-c);if(8>c+d)g=g<<c|a,d+=c;else for(h=0;h<c;++h)g=g<<1|a>>c-h-1&1,8===++d&&(d=0,e[f++]=N[g],g=0,f===e.length&&(e=this.f()));e[f]=g;this.buffer=e;this.i=d;this.index=f};J.prototype.finish=function(){var a=this.buffer,c=this.index,b;0<this.i&&(a[c]<<=8-this.i,a[c]=N[a[c]],c++);H?b=a.subarray(0,c):(a.length=c,b=a);return b};
var ca=new (H?Uint8Array:Array)(256),ha;for(ha=0;256>ha;++ha){for(var R=ha,ia=R,ja=7,R=R>>>1;R;R>>>=1)ia<<=1,ia|=R&1,--ja;ca[ha]=(ia<<ja&255)>>>0}var N=ca;var ka=[0,1996959894,3993919788,2567524794,124634137,1886057615,3915621685,2657392035,249268274,2044508324,3772115230,2547177864,162941995,2125561021,3887607047,2428444049,498536548,1789927666,4089016648,2227061214,450548861,1843258603,4107580753,2211677639,325883990,1684777152,4251122042,2321926636,335633487,1661365465,4195302755,2366115317,997073096,1281953886,3579855332,2724688242,1006888145,1258607687,3524101629,2768942443,901097722,1119000684,3686517206,2898065728,853044451,1172266101,3705015759,
2882616665,651767980,1373503546,3369554304,3218104598,565507253,1454621731,3485111705,3099436303,671266974,1594198024,3322730930,2970347812,795835527,1483230225,3244367275,3060149565,1994146192,31158534,2563907772,4023717930,1907459465,112637215,2680153253,3904427059,2013776290,251722036,2517215374,3775830040,2137656763,141376813,2439277719,3865271297,1802195444,476864866,2238001368,4066508878,1812370925,453092731,2181625025,4111451223,1706088902,314042704,2344532202,4240017532,1658658271,366619977,
2362670323,4224994405,1303535960,984961486,2747007092,3569037538,1256170817,1037604311,2765210733,3554079995,1131014506,879679996,2909243462,3663771856,1141124467,855842277,2852801631,3708648649,1342533948,654459306,3188396048,3373015174,1466479909,544179635,3110523913,3462522015,1591671054,702138776,2966460450,3352799412,1504918807,783551873,3082640443,3233442989,3988292384,2596254646,62317068,1957810842,3939845945,2647816111,81470997,1943803523,3814918930,2489596804,225274430,2053790376,3826175755,
2466906013,167816743,2097651377,4027552580,2265490386,503444072,1762050814,4150417245,2154129355,426522225,1852507879,4275313526,2312317920,282753626,1742555852,4189708143,2394877945,397917763,1622183637,3604390888,2714866558,953729732,1340076626,3518719985,2797360999,1068828381,1219638859,3624741850,2936675148,906185462,1090812512,3747672003,2825379669,829329135,1181335161,3412177804,3160834842,628085408,1382605366,3423369109,3138078467,570562233,1426400815,3317316542,2998733608,733239954,1555261956,
3268935591,3050360625,752459403,1541320221,2607071920,3965973030,1969922972,40735498,2617837225,3943577151,1913087877,83908371,2512341634,3803740692,2075208622,213261112,2463272603,3855990285,2094854071,198958881,2262029012,4057260610,1759359992,534414190,2176718541,4139329115,1873836001,414664567,2282248934,4279200368,1711684554,285281116,2405801727,4167216745,1634467795,376229701,2685067896,3608007406,1308918612,956543938,2808555105,3495958263,1231636301,1047427035,2932959818,3654703836,1088359270,
936918E3,2847714899,3736837829,1202900863,817233897,3183342108,3401237130,1404277552,615818150,3134207493,3453421203,1423857449,601450431,3009837614,3294710456,1567103746,711928724,3020668471,3272380065,1510334235,755167117];H&&new Uint32Array(ka);function la(a){this.buffer=new (H?Uint16Array:Array)(2*a);this.length=0}la.prototype.getParent=function(a){return 2*((a-2)/4|0)};la.prototype.push=function(a,c){var b,e,f=this.buffer,d;b=this.length;f[this.length++]=c;for(f[this.length++]=a;0<b;)if(e=this.getParent(b),f[b]>f[e])d=f[b],f[b]=f[e],f[e]=d,d=f[b+1],f[b+1]=f[e+1],f[e+1]=d,b=e;else break;return this.length};
la.prototype.pop=function(){var a,c,b=this.buffer,e,f,d;c=b[0];a=b[1];this.length-=2;b[0]=b[this.length];b[1]=b[this.length+1];for(d=0;;){f=2*d+2;if(f>=this.length)break;f+2<this.length&&b[f+2]>b[f]&&(f+=2);if(b[f]>b[d])e=b[d],b[d]=b[f],b[f]=e,e=b[d+1],b[d+1]=b[f+1],b[f+1]=e;else break;d=f}return{index:a,value:c,length:this.length}};function S(a){var c=a.length,b=0,e=Number.POSITIVE_INFINITY,f,d,g,h,m,j,s,n,l;for(n=0;n<c;++n)a[n]>b&&(b=a[n]),a[n]<e&&(e=a[n]);f=1<<b;d=new (H?Uint32Array:Array)(f);g=1;h=0;for(m=2;g<=b;){for(n=0;n<c;++n)if(a[n]===g){j=0;s=h;for(l=0;l<g;++l)j=j<<1|s&1,s>>=1;for(l=j;l<f;l+=m)d[l]=g<<16|n;++h}++g;h<<=1;m<<=1}return[d,b,e]};function ma(a,c){this.h=pa;this.w=0;this.input=a;this.b=0;c&&(c.lazy&&(this.w=c.lazy),"number"===typeof c.compressionType&&(this.h=c.compressionType),c.outputBuffer&&(this.a=H&&c.outputBuffer instanceof Array?new Uint8Array(c.outputBuffer):c.outputBuffer),"number"===typeof c.outputIndex&&(this.b=c.outputIndex));this.a||(this.a=new (H?Uint8Array:Array)(32768))}var pa=2,qa={NONE:0,r:1,j:pa,N:3},ra=[],T;
for(T=0;288>T;T++)switch(v){case 143>=T:ra.push([T+48,8]);break;case 255>=T:ra.push([T-144+400,9]);break;case 279>=T:ra.push([T-256+0,7]);break;case 287>=T:ra.push([T-280+192,8]);break;default:i("invalid literal: "+T)}
ma.prototype.n=function(){var a,c,b,e,f=this.input;switch(this.h){case 0:b=0;for(e=f.length;b<e;){c=H?f.subarray(b,b+65535):f.slice(b,b+65535);b+=c.length;var d=c,g=b===e,h=r,m=r,j=r,s=r,n=r,l=this.a,q=this.b;if(H){for(l=new Uint8Array(this.a.buffer);l.length<=q+d.length+5;)l=new Uint8Array(l.length<<1);l.set(this.a)}h=g?1:0;l[q++]=h|0;m=d.length;j=~m+65536&65535;l[q++]=m&255;l[q++]=m>>>8&255;l[q++]=j&255;l[q++]=j>>>8&255;if(H)l.set(d,q),q+=d.length,l=l.subarray(0,q);else{s=0;for(n=d.length;s<n;++s)l[q++]=
d[s];l.length=q}this.b=q;this.a=l}break;case 1:var E=new J(new Uint8Array(this.a.buffer),this.b);E.d(1,1,v);E.d(1,2,v);var t=sa(this,f),z,K,A;z=0;for(K=t.length;z<K;z++)if(A=t[z],J.prototype.d.apply(E,ra[A]),256<A)E.d(t[++z],t[++z],v),E.d(t[++z],5),E.d(t[++z],t[++z],v);else if(256===A)break;this.a=E.finish();this.b=this.a.length;break;case pa:var x=new J(new Uint8Array(this.a),this.b),B,k,p,D,C,da=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],W,Ma,ea,Na,na,va=Array(19),Oa,$,oa,F,Pa;B=pa;x.d(1,
1,v);x.d(B,2,v);k=sa(this,f);W=ta(this.L,15);Ma=ua(W);ea=ta(this.K,7);Na=ua(ea);for(p=286;257<p&&0===W[p-1];p--);for(D=30;1<D&&0===ea[D-1];D--);var Qa=p,Ra=D,M=new (H?Uint32Array:Array)(Qa+Ra),u,O,w,fa,L=new (H?Uint32Array:Array)(316),I,G,P=new (H?Uint8Array:Array)(19);for(u=O=0;u<Qa;u++)M[O++]=W[u];for(u=0;u<Ra;u++)M[O++]=ea[u];if(!H){u=0;for(fa=P.length;u<fa;++u)P[u]=0}u=I=0;for(fa=M.length;u<fa;u+=O){for(O=1;u+O<fa&&M[u+O]===M[u];++O);w=O;if(0===M[u])if(3>w)for(;0<w--;)L[I++]=0,P[0]++;else for(;0<
w;)G=138>w?w:138,G>w-3&&G<w&&(G=w-3),10>=G?(L[I++]=17,L[I++]=G-3,P[17]++):(L[I++]=18,L[I++]=G-11,P[18]++),w-=G;else if(L[I++]=M[u],P[M[u]]++,w--,3>w)for(;0<w--;)L[I++]=M[u],P[M[u]]++;else for(;0<w;)G=6>w?w:6,G>w-3&&G<w&&(G=w-3),L[I++]=16,L[I++]=G-3,P[16]++,w-=G}a=H?L.subarray(0,I):L.slice(0,I);na=ta(P,7);for(F=0;19>F;F++)va[F]=na[da[F]];for(C=19;4<C&&0===va[C-1];C--);Oa=ua(na);x.d(p-257,5,v);x.d(D-1,5,v);x.d(C-4,4,v);for(F=0;F<C;F++)x.d(va[F],3,v);F=0;for(Pa=a.length;F<Pa;F++)if($=a[F],x.d(Oa[$],
na[$],v),16<=$){F++;switch($){case 16:oa=2;break;case 17:oa=3;break;case 18:oa=7;break;default:i("invalid code: "+$)}x.d(a[F],oa,v)}var Sa=[Ma,W],Ta=[Na,ea],Q,Ua,ga,ya,Va,Wa,Xa,Ya;Va=Sa[0];Wa=Sa[1];Xa=Ta[0];Ya=Ta[1];Q=0;for(Ua=k.length;Q<Ua;++Q)if(ga=k[Q],x.d(Va[ga],Wa[ga],v),256<ga)x.d(k[++Q],k[++Q],v),ya=k[++Q],x.d(Xa[ya],Ya[ya],v),x.d(k[++Q],k[++Q],v);else if(256===ga)break;this.a=x.finish();this.b=this.a.length;break;default:i("invalid compression type")}return this.a};
function wa(a,c){this.length=a;this.G=c}
function xa(){var a=za;switch(v){case 3===a:return[257,a-3,0];case 4===a:return[258,a-4,0];case 5===a:return[259,a-5,0];case 6===a:return[260,a-6,0];case 7===a:return[261,a-7,0];case 8===a:return[262,a-8,0];case 9===a:return[263,a-9,0];case 10===a:return[264,a-10,0];case 12>=a:return[265,a-11,1];case 14>=a:return[266,a-13,1];case 16>=a:return[267,a-15,1];case 18>=a:return[268,a-17,1];case 22>=a:return[269,a-19,2];case 26>=a:return[270,a-23,2];case 30>=a:return[271,a-27,2];case 34>=a:return[272,a-
31,2];case 42>=a:return[273,a-35,3];case 50>=a:return[274,a-43,3];case 58>=a:return[275,a-51,3];case 66>=a:return[276,a-59,3];case 82>=a:return[277,a-67,4];case 98>=a:return[278,a-83,4];case 114>=a:return[279,a-99,4];case 130>=a:return[280,a-115,4];case 162>=a:return[281,a-131,5];case 194>=a:return[282,a-163,5];case 226>=a:return[283,a-195,5];case 257>=a:return[284,a-227,5];case 258===a:return[285,a-258,0];default:i("invalid length: "+a)}}var Aa=[],za,Ba;
for(za=3;258>=za;za++)Ba=xa(),Aa[za]=Ba[2]<<24|Ba[1]<<16|Ba[0];var Ca=H?new Uint32Array(Aa):Aa;
function sa(a,c){function b(a,c){var b=a.G,d=[],e=0,f;f=Ca[a.length];d[e++]=f&65535;d[e++]=f>>16&255;d[e++]=f>>24;var g;switch(v){case 1===b:g=[0,b-1,0];break;case 2===b:g=[1,b-2,0];break;case 3===b:g=[2,b-3,0];break;case 4===b:g=[3,b-4,0];break;case 6>=b:g=[4,b-5,1];break;case 8>=b:g=[5,b-7,1];break;case 12>=b:g=[6,b-9,2];break;case 16>=b:g=[7,b-13,2];break;case 24>=b:g=[8,b-17,3];break;case 32>=b:g=[9,b-25,3];break;case 48>=b:g=[10,b-33,4];break;case 64>=b:g=[11,b-49,4];break;case 96>=b:g=[12,b-
65,5];break;case 128>=b:g=[13,b-97,5];break;case 192>=b:g=[14,b-129,6];break;case 256>=b:g=[15,b-193,6];break;case 384>=b:g=[16,b-257,7];break;case 512>=b:g=[17,b-385,7];break;case 768>=b:g=[18,b-513,8];break;case 1024>=b:g=[19,b-769,8];break;case 1536>=b:g=[20,b-1025,9];break;case 2048>=b:g=[21,b-1537,9];break;case 3072>=b:g=[22,b-2049,10];break;case 4096>=b:g=[23,b-3073,10];break;case 6144>=b:g=[24,b-4097,11];break;case 8192>=b:g=[25,b-6145,11];break;case 12288>=b:g=[26,b-8193,12];break;case 16384>=
b:g=[27,b-12289,12];break;case 24576>=b:g=[28,b-16385,13];break;case 32768>=b:g=[29,b-24577,13];break;default:i("invalid distance")}f=g;d[e++]=f[0];d[e++]=f[1];d[e++]=f[2];var h,j;h=0;for(j=d.length;h<j;++h)l[q++]=d[h];t[d[0]]++;z[d[3]]++;E=a.length+c-1;n=null}var e,f,d,g,h,m={},j,s,n,l=H?new Uint16Array(2*c.length):[],q=0,E=0,t=new (H?Uint32Array:Array)(286),z=new (H?Uint32Array:Array)(30),K=a.w,A;if(!H){for(d=0;285>=d;)t[d++]=0;for(d=0;29>=d;)z[d++]=0}t[256]=1;e=0;for(f=c.length;e<f;++e){d=h=0;
for(g=3;d<g&&e+d!==f;++d)h=h<<8|c[e+d];m[h]===r&&(m[h]=[]);j=m[h];if(!(0<E--)){for(;0<j.length&&32768<e-j[0];)j.shift();if(e+3>=f){n&&b(n,-1);d=0;for(g=f-e;d<g;++d)A=c[e+d],l[q++]=A,++t[A];break}if(0<j.length){var x=r,B=r,k=0,p=r,D=r,C=r,da=r,W=c.length,D=0,da=j.length;a:for(;D<da;D++){x=j[da-D-1];p=3;if(3<k){for(C=k;3<C;C--)if(c[x+C-1]!==c[e+C-1])continue a;p=k}for(;258>p&&e+p<W&&c[x+p]===c[e+p];)++p;p>k&&(B=x,k=p);if(258===p)break}s=new wa(k,e-B);n?n.length<s.length?(A=c[e-1],l[q++]=A,++t[A],b(s,
0)):b(n,-1):s.length<K?n=s:b(s,0)}else n?b(n,-1):(A=c[e],l[q++]=A,++t[A])}j.push(e)}l[q++]=256;t[256]++;a.L=t;a.K=z;return H?l.subarray(0,q):l}
function ta(a,c){function b(a){var c=z[a][K[a]];c===n?(b(a+1),b(a+1)):--E[c];++K[a]}var e=a.length,f=new la(572),d=new (H?Uint8Array:Array)(e),g,h,m,j,s;if(!H)for(j=0;j<e;j++)d[j]=0;for(j=0;j<e;++j)0<a[j]&&f.push(j,a[j]);g=Array(f.length/2);h=new (H?Uint32Array:Array)(f.length/2);if(1===g.length)return d[f.pop().index]=1,d;j=0;for(s=f.length/2;j<s;++j)g[j]=f.pop(),h[j]=g[j].value;var n=h.length,l=new (H?Uint16Array:Array)(c),q=new (H?Uint8Array:Array)(c),E=new (H?Uint8Array:Array)(n),t=Array(c),z=
Array(c),K=Array(c),A=(1<<c)-n,x=1<<c-1,B,k,p,D,C;l[c-1]=n;for(k=0;k<c;++k)A<x?q[k]=0:(q[k]=1,A-=x),A<<=1,l[c-2-k]=(l[c-1-k]/2|0)+n;l[0]=q[0];t[0]=Array(l[0]);z[0]=Array(l[0]);for(k=1;k<c;++k)l[k]>2*l[k-1]+q[k]&&(l[k]=2*l[k-1]+q[k]),t[k]=Array(l[k]),z[k]=Array(l[k]);for(B=0;B<n;++B)E[B]=c;for(p=0;p<l[c-1];++p)t[c-1][p]=h[p],z[c-1][p]=p;for(B=0;B<c;++B)K[B]=0;1===q[c-1]&&(--E[0],++K[c-1]);for(k=c-2;0<=k;--k){D=B=0;C=K[k+1];for(p=0;p<l[k];p++)D=t[k+1][C]+t[k+1][C+1],D>h[B]?(t[k][p]=D,z[k][p]=n,C+=2):
(t[k][p]=h[B],z[k][p]=B,++B);K[k]=0;1===q[k]&&b(k)}m=E;j=0;for(s=g.length;j<s;++j)d[g[j].index]=m[j];return d}function ua(a){var c=new (H?Uint16Array:Array)(a.length),b=[],e=[],f=0,d,g,h,m;d=0;for(g=a.length;d<g;d++)b[a[d]]=(b[a[d]]|0)+1;d=1;for(g=16;d<=g;d++)e[d]=f,f+=b[d]|0,f>1<<d&&i("overcommitted"),f<<=1;65536>f&&i("undercommitted");d=0;for(g=a.length;d<g;d++){f=e[a[d]];e[a[d]]+=1;h=c[d]=0;for(m=a[d];h<m;h++)c[d]=c[d]<<1|f&1,f>>>=1}return c};function Da(a,c){this.input=a;this.a=new (H?Uint8Array:Array)(32768);this.h=U.j;var b={},e;if((c||!(c={}))&&"number"===typeof c.compressionType)this.h=c.compressionType;for(e in c)b[e]=c[e];b.outputBuffer=this.a;this.z=new ma(this.input,b)}var U=qa;
Da.prototype.n=function(){var a,c,b,e,f,d,g,h=0;g=this.a;a=Ea;switch(a){case Ea:c=Math.LOG2E*Math.log(32768)-8;break;default:i(Error("invalid compression method"))}b=c<<4|a;g[h++]=b;switch(a){case Ea:switch(this.h){case U.NONE:f=0;break;case U.r:f=1;break;case U.j:f=2;break;default:i(Error("unsupported compression type"))}break;default:i(Error("invalid compression method"))}e=f<<6|0;g[h++]=e|31-(256*b+e)%31;d=ba(this.input);this.z.b=h;g=this.z.n();h=g.length;H&&(g=new Uint8Array(g.buffer),g.length<=
h+4&&(this.a=new Uint8Array(g.length+4),this.a.set(g),g=this.a),g=g.subarray(0,h+4));g[h++]=d>>24&255;g[h++]=d>>16&255;g[h++]=d>>8&255;g[h++]=d&255;return g};y("Zlib.Deflate",Da);y("Zlib.Deflate.compress",function(a,c){return(new Da(a,c)).n()});y("Zlib.Deflate.CompressionType",U);y("Zlib.Deflate.CompressionType.NONE",U.NONE);y("Zlib.Deflate.CompressionType.FIXED",U.r);y("Zlib.Deflate.CompressionType.DYNAMIC",U.j);function V(a,c){this.k=[];this.l=32768;this.e=this.g=this.c=this.q=0;this.input=H?new Uint8Array(a):a;this.s=!1;this.m=Fa;this.B=!1;if(c||!(c={}))c.index&&(this.c=c.index),c.bufferSize&&(this.l=c.bufferSize),c.bufferType&&(this.m=c.bufferType),c.resize&&(this.B=c.resize);switch(this.m){case Ga:this.b=32768;this.a=new (H?Uint8Array:Array)(32768+this.l+258);break;case Fa:this.b=0;this.a=new (H?Uint8Array:Array)(this.l);this.f=this.J;this.t=this.H;this.o=this.I;break;default:i(Error("invalid inflate mode"))}}
var Ga=0,Fa=1,Ha={D:Ga,C:Fa};
V.prototype.p=function(){for(;!this.s;){var a=X(this,3);a&1&&(this.s=v);a>>>=1;switch(a){case 0:var c=this.input,b=this.c,e=this.a,f=this.b,d=r,g=r,h=r,m=e.length,j=r;this.e=this.g=0;d=c[b++];d===r&&i(Error("invalid uncompressed block header: LEN (first byte)"));g=d;d=c[b++];d===r&&i(Error("invalid uncompressed block header: LEN (second byte)"));g|=d<<8;d=c[b++];d===r&&i(Error("invalid uncompressed block header: NLEN (first byte)"));h=d;d=c[b++];d===r&&i(Error("invalid uncompressed block header: NLEN (second byte)"));h|=
d<<8;g===~h&&i(Error("invalid uncompressed block header: length verify"));b+g>c.length&&i(Error("input buffer is broken"));switch(this.m){case Ga:for(;f+g>e.length;){j=m-f;g-=j;if(H)e.set(c.subarray(b,b+j),f),f+=j,b+=j;else for(;j--;)e[f++]=c[b++];this.b=f;e=this.f();f=this.b}break;case Fa:for(;f+g>e.length;)e=this.f({v:2});break;default:i(Error("invalid inflate mode"))}if(H)e.set(c.subarray(b,b+g),f),f+=g,b+=g;else for(;g--;)e[f++]=c[b++];this.c=b;this.b=f;this.a=e;break;case 1:this.o(Ia,Ja);break;
case 2:Ka(this);break;default:i(Error("unknown BTYPE: "+a))}}return this.t()};
var La=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15],Za=H?new Uint16Array(La):La,$a=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,258,258],ab=H?new Uint16Array($a):$a,bb=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0,0,0],cb=H?new Uint8Array(bb):bb,db=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577],eb=H?new Uint16Array(db):db,fb=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,
10,11,11,12,12,13,13],gb=H?new Uint8Array(fb):fb,hb=new (H?Uint8Array:Array)(288),Y,ib;Y=0;for(ib=hb.length;Y<ib;++Y)hb[Y]=143>=Y?8:255>=Y?9:279>=Y?7:8;var Ia=S(hb),jb=new (H?Uint8Array:Array)(30),kb,lb;kb=0;for(lb=jb.length;kb<lb;++kb)jb[kb]=5;var Ja=S(jb);function X(a,c){for(var b=a.g,e=a.e,f=a.input,d=a.c,g;e<c;)g=f[d++],g===r&&i(Error("input buffer is broken")),b|=g<<e,e+=8;g=b&(1<<c)-1;a.g=b>>>c;a.e=e-c;a.c=d;return g}
function mb(a,c){for(var b=a.g,e=a.e,f=a.input,d=a.c,g=c[0],h=c[1],m,j,s;e<h;)m=f[d++],m===r&&i(Error("input buffer is broken")),b|=m<<e,e+=8;j=g[b&(1<<h)-1];s=j>>>16;a.g=b>>s;a.e=e-s;a.c=d;return j&65535}
function Ka(a){function c(a,b,c){var d,e,f,g;for(g=0;g<a;)switch(d=mb(this,b),d){case 16:for(f=3+X(this,2);f--;)c[g++]=e;break;case 17:for(f=3+X(this,3);f--;)c[g++]=0;e=0;break;case 18:for(f=11+X(this,7);f--;)c[g++]=0;e=0;break;default:e=c[g++]=d}return c}var b=X(a,5)+257,e=X(a,5)+1,f=X(a,4)+4,d=new (H?Uint8Array:Array)(Za.length),g,h,m,j;for(j=0;j<f;++j)d[Za[j]]=X(a,3);g=S(d);h=new (H?Uint8Array:Array)(b);m=new (H?Uint8Array:Array)(e);a.o(S(c.call(a,b,g,h)),S(c.call(a,e,g,m)))}
V.prototype.o=function(a,c){var b=this.a,e=this.b;this.u=a;for(var f=b.length-258,d,g,h,m;256!==(d=mb(this,a));)if(256>d)e>=f&&(this.b=e,b=this.f(),e=this.b),b[e++]=d;else{g=d-257;m=ab[g];0<cb[g]&&(m+=X(this,cb[g]));d=mb(this,c);h=eb[d];0<gb[d]&&(h+=X(this,gb[d]));e>=f&&(this.b=e,b=this.f(),e=this.b);for(;m--;)b[e]=b[e++-h]}for(;8<=this.e;)this.e-=8,this.c--;this.b=e};
V.prototype.I=function(a,c){var b=this.a,e=this.b;this.u=a;for(var f=b.length,d,g,h,m;256!==(d=mb(this,a));)if(256>d)e>=f&&(b=this.f(),f=b.length),b[e++]=d;else{g=d-257;m=ab[g];0<cb[g]&&(m+=X(this,cb[g]));d=mb(this,c);h=eb[d];0<gb[d]&&(h+=X(this,gb[d]));e+m>f&&(b=this.f(),f=b.length);for(;m--;)b[e]=b[e++-h]}for(;8<=this.e;)this.e-=8,this.c--;this.b=e};
V.prototype.f=function(){var a=new (H?Uint8Array:Array)(this.b-32768),c=this.b-32768,b,e,f=this.a;if(H)a.set(f.subarray(32768,a.length));else{b=0;for(e=a.length;b<e;++b)a[b]=f[b+32768]}this.k.push(a);this.q+=a.length;if(H)f.set(f.subarray(c,c+32768));else for(b=0;32768>b;++b)f[b]=f[c+b];this.b=32768;return f};
V.prototype.J=function(a){var c,b=this.input.length/this.c+1|0,e,f,d,g=this.input,h=this.a;a&&("number"===typeof a.v&&(b=a.v),"number"===typeof a.F&&(b+=a.F));2>b?(e=(g.length-this.c)/this.u[2],d=258*(e/2)|0,f=d<h.length?h.length+d:h.length<<1):f=h.length*b;H?(c=new Uint8Array(f),c.set(h)):c=h;return this.a=c};
V.prototype.t=function(){var a=0,c=this.a,b=this.k,e,f=new (H?Uint8Array:Array)(this.q+(this.b-32768)),d,g,h,m;if(0===b.length)return H?this.a.subarray(32768,this.b):this.a.slice(32768,this.b);d=0;for(g=b.length;d<g;++d){e=b[d];h=0;for(m=e.length;h<m;++h)f[a++]=e[h]}d=32768;for(g=this.b;d<g;++d)f[a++]=c[d];this.k=[];return this.buffer=f};
V.prototype.H=function(){var a,c=this.b;H?this.B?(a=new Uint8Array(c),a.set(this.a.subarray(0,c))):a=this.a.subarray(0,c):(this.a.length>c&&(this.a.length=c),a=this.a);return this.buffer=a};function nb(a,c){var b,e;this.input=a;this.c=0;if(c||!(c={}))c.index&&(this.c=c.index),c.verify&&(this.M=c.verify);b=a[this.c++];e=a[this.c++];switch(b&15){case Ea:this.method=Ea;break;default:i(Error("unsupported compression method"))}0!==((b<<8)+e)%31&&i(Error("invalid fcheck flag:"+((b<<8)+e)%31));e&32&&i(Error("fdict flag is not supported"));this.A=new V(a,{index:this.c,bufferSize:c.bufferSize,bufferType:c.bufferType,resize:c.resize})}
nb.prototype.p=function(){var a=this.input,c,b;c=this.A.p();this.c=this.A.c;this.M&&(b=(a[this.c++]<<24|a[this.c++]<<16|a[this.c++]<<8|a[this.c++])>>>0,b!==ba(c)&&i(Error("invalid adler-32 checksum")));return c};y("Zlib.Inflate",nb);y("Zlib.Inflate.BufferType",Ha);Ha.ADAPTIVE=Ha.C;Ha.BLOCK=Ha.D;y("Zlib.Inflate.prototype.decompress",nb.prototype.p);var ob=[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];H&&new Uint16Array(ob);var pb=[3,4,5,6,7,8,9,10,11,13,15,17,19,23,27,31,35,43,51,59,67,83,99,115,131,163,195,227,258,258,258];H&&new Uint16Array(pb);var qb=[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0,0,0];H&&new Uint8Array(qb);var rb=[1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577];H&&new Uint16Array(rb);
var sb=[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13];H&&new Uint8Array(sb);var tb=new (H?Uint8Array:Array)(288),Z,ub;Z=0;for(ub=tb.length;Z<ub;++Z)tb[Z]=143>=Z?8:255>=Z?9:279>=Z?7:8;S(tb);var vb=new (H?Uint8Array:Array)(30),wb,xb;wb=0;for(xb=vb.length;wb<xb;++wb)vb[wb]=5;S(vb);var Ea=8;}).call(window);
//add for cocos2d-html5
// Re-expose the Closure-compiled, string-keyed exports (window["Zlib"], etc.)
// as dotted properties so code accessing them via dot notation still resolves
// after aggressive minification.
// NOTE(review): assumes the zlib.js IIFE above has already run and populated
// window["Zlib"] — verify script ordering if this shim is ever moved.
var _p = window;
_p = window.Zlib = _p["Zlib"];
_p.Deflate = _p["Deflate"];
_p.Deflate.compress = _p.Deflate["compress"];
_p.Inflate = _p["Inflate"];
_p.Inflate.BufferType = _p.Inflate["BufferType"];
_p.Inflate.prototype.decompress = _p.Inflate.prototype["decompress"];
|
/**
 * Action type identifiers for message CRUD operations.
 * Each constant's value mirrors its name so dispatched actions stay
 * self-describing in logs and devtools.
 */
export const ADD_MESSAGE = 'ADD_MESSAGE'
export const FETCH_MESSAGE = 'FETCH_MESSAGE'
export const UPDATE_MESSAGE = 'UPDATE_MESSAGE'
export const DELETE_MESSAGE = 'DELETE_MESSAGE'
export const FETCH_ALL_CONTACT_MESSAGES = 'FETCH_ALL_CONTACT_MESSAGES'
|
/* eslint-disable */
// Registers the 'dealerships/entertainment' glyph (a 32x32 single-path SVG)
// with vue-svgicon so it can be rendered by name via the <svgicon> component.
var icon = require('vue-svgicon')
icon.register({
  'dealerships/entertainment': {
    width: 32,
    height: 32,
    viewBox: '0 0 32 32',
    data: '<path pid="0" d="M15.868 19.874h7.463c.422 0 .643.526.332.811-1.074.977-2.53 1.589-4.063 1.589s-2.989-.612-4.063-1.589c-.312-.286-.091-.811.331-.811zm-7.668-2.4c.961 0 1.8.839 1.8 1.8s-.839 1.8-1.8 1.8c-.961 0-1.8-.839-1.8-1.8s.839-1.8 1.8-1.8zm5.513-2.4h11.774c.122.361.113.839.113 1.2s.01.839-.113 1.2H13.713c-.122-.361-.113-.839-.113-1.2s-.01-.839.113-1.2zm-5.513-3.6c.961 0 1.8.84 1.8 1.8 0 .961-.839 1.8-1.8 1.8s-1.8-.839-1.8-1.8c0-.96.839-1.8 1.8-1.8zm11.4-1.2c1.534 0 2.989.612 4.063 1.59.311.284.09.81-.332.81h-7.463c-.422 0-.643-.526-.331-.81a6.087 6.087 0 014.063-1.59zM4 9.549v13.454a2.878 2.878 0 002.869 2.87h18.26a2.878 2.878 0 002.87-2.87V9.544a2.878 2.878 0 00-2.869-2.87l-12.673.002c-.54.025-3.347.018-5.581.007A2.863 2.863 0 004 9.549z"/>'
  }
})
|
import { h, Component } from 'preact';
import { Router } from 'preact-router';
import Helmet from 'preact-helmet';
import AppLayout from '../app-layout';
import DrawerFrame from '../../routes/drawer-frame';
import TopAppBarFrame from '../../routes/top-app-bar-frame';
// Load Preact's debug/devtools helpers only when webpack hot-module
// replacement is active (module.hot only exists in development builds).
if (module.hot) {
  require('preact/debug');
}
/**
 * Resets the document scroll position to the top.
 * No-op during server-side rendering, where `window` is not defined.
 */
function scrollTop() {
  if (typeof window === 'undefined') {
    return;
  }
  document.documentElement.scrollTop = 0;
}
/**
 * Root application component: installs default <head> metadata via
 * preact-helmet and routes between the standalone demo frames and the
 * main documentation layout.
 */
export default class App extends Component {
  // Router onChange handler: scrolls back to the top on every navigation
  // to a new URL. The last URL is cached on the instance so repeated
  // notifications for the same route don't reset the scroll position.
  handleChange = e => {
    if (this.currentUrl !== e.url) {
      this.currentUrl = e.url;
      scrollTop();
    }
  };
  render() {
    return (
      <div className="mdc-typography">
        <Helmet
          defaultTitle="Preact Material Components for the Web"
          titleTemplate="%s - Preact Material Components for the Web"
          meta={[
            {
              property: 'og:site_name',
              content: 'Preact Material Components'
            }
          ]}
        />
        <Router onChange={this.handleChange}>
          <TopAppBarFrame path="/top-app-bar/frame/:variant?" />
          <DrawerFrame path="/drawer/frame/:variant" />
          <AppLayout path="/:component?" />
        </Router>
      </div>
    );
  }
}
|
// Copyright (c) 2011-2017 The Cryptonote developers
// Copyright (c) 2017-2018 The Circle Foundation & Conceal Devs
// Copyright (c) 2018-2021 Conceal Network & Conceal Devs
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#pragma once
// When compiled as plain C, map the non-standard alloca() onto the
// compiler's _alloca intrinsic; C++ translation units are left untouched.
// NOTE(review): _alloca is MSVC-specific — presumably this header is only
// included for C sources on Windows builds; confirm against the build setup.
#ifndef __cplusplus
#define alloca(size) _alloca(size)
#endif
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for fused batch norm operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class FusedBatchNormTest(XLATestCase):
  """Tests XLA's fused batch-norm ops against NumPy reference math."""

  def _reference_training(self, x, scale, offset, epsilon, data_format):
    """NumPy reference for the forward pass in training mode.

    Args:
      x: 4-D input array laid out as [batch, height, width, channels].
      scale: per-channel scale (gamma), shape [channels].
      offset: per-channel offset (beta), shape [channels].
      epsilon: small float added to the variance for numerical stability.
      data_format: must be "NHWC".

    Returns:
      Tuple (y, mean, var): normalized output plus per-channel batch stats.

    Raises:
      ValueError: if data_format is not "NHWC".
    """
    if data_format != "NHWC":
      raise ValueError("data_format must be NHWC, got %s." % data_format)
    x_square = x * x
    x_square_sum = np.sum(x_square, (0, 1, 2))
    x_sum = np.sum(x, axis=(0, 1, 2))
    # Elements per channel is batch*height*width, i.e. total size divided by
    # the channel count (last dim).  The previous code divided by shape[0],
    # which only worked because the test shapes have batch == channels.
    element_count = np.size(x) / int(np.shape(x)[-1])
    mean = x_sum / element_count
    var = x_square_sum / element_count - mean * mean
    normalized = (x - mean) / np.sqrt(var + epsilon)
    return (normalized * scale + offset), mean, var

  def _reference_grad(self, x, grad_y, scale, mean, var, epsilon, data_format):
    """NumPy reference for the batch-norm gradient.

    Uses the following formulas:
      grad_scale = sum(grad_y * (x - mean)) * rsqrt(var + epsilon)
      grad_offset = sum(grad_y)
      grad_x = 1/N * scale * rsqrt(var + epsilon) *
               (N * grad_y - sum(grad_y) -
                (x - mean) * sum(grad_y * (x - mean)) / (var + epsilon))

    Raises:
      ValueError: if data_format is not "NHWC".
    """
    if data_format != "NHWC":
      raise ValueError("data_format must be NHWC, got %s." % data_format)
    grad_x = scale * (grad_y - np.mean(grad_y, axis=(0, 1, 2)) -
                      (x - mean) * np.mean(grad_y *
                                           (x - mean), axis=(0, 1, 2)) /
                      (var + epsilon)) / np.sqrt(var + epsilon)
    grad_scale = np.sum(
        grad_y * (x - mean) / np.sqrt(var + epsilon), axis=(0, 1, 2))
    grad_offset = np.sum(grad_y, axis=(0, 1, 2))
    return grad_x, grad_scale, grad_offset

  def _testLearning(self, use_gradient_checker):
    """Runs the training-mode op and compares against the NumPy reference.

    Args:
      use_gradient_checker: also run a numeric gradient check when True.
    """
    x_shape = [2, 2, 6, 2]
    scale_shape = [2]
    x_val = np.random.random_sample(x_shape).astype(np.float32)
    scale_val = np.random.random_sample(scale_shape).astype(np.float32)
    offset_val = np.random.random_sample(scale_shape).astype(np.float32)
    data_format = "NHWC"
    with self.test_session() as sess, self.test_scope():
      # Placeholders (rather than constants) to avoid constant folding.
      t_val = array_ops.placeholder(np.float32, shape=x_shape, name="x")
      # Use scale_shape instead of a duplicated hard-coded [2].
      scale = array_ops.placeholder(np.float32, shape=scale_shape, name="scale")
      offset = array_ops.placeholder(
          np.float32, shape=scale_shape, name="offset")
      epsilon = 0.001
      y, mean, var = nn.fused_batch_norm(
          t_val,
          scale,
          offset,
          mean=None,
          variance=None,
          epsilon=epsilon,
          data_format=data_format,
          is_training=True)
      # Check gradient numerically against the symbolic gradient.
      if use_gradient_checker:
        err = gradient_checker.compute_gradient_error(
            t_val,
            x_shape,
            y,
            x_shape,
            extra_feed_dict={
                t_val: x_val,
                scale: scale_val,
                offset: offset_val
            })
        self.assertLess(err, 1e-3)
      y_val, mean_val, var_val = sess.run(
          [y, mean, var], {t_val: x_val,
                           scale: scale_val,
                           offset: offset_val})
      y_ref, mean_ref, var_ref = self._reference_training(
          x_val, scale_val, offset_val, epsilon, data_format)
      self.assertAllClose(mean_val, mean_ref, atol=1e-3)
      self.assertAllClose(y_val, y_ref, atol=1e-3)
      self.assertAllClose(var_val, var_ref, atol=1e-3)

  def testLearning(self):
    self._testLearning(False)

  def testLearningWithGradientChecker(self):
    self._testLearning(True)

  def testGradient(self):
    """Compares the fused gradient op against the NumPy reference."""
    # TODO(b/64270657): Use gradient_checker here in addition to comparing with
    # this reference implementation.
    x_shape = [2, 2, 6, 2]
    scale_shape = [2]
    grad_val = np.random.random_sample(x_shape).astype(np.float32)
    x_val = np.random.random_sample(x_shape).astype(np.float32)
    scale_val = np.random.random_sample(scale_shape).astype(np.float32)
    mean_val = np.random.random_sample(scale_shape).astype(np.float32)
    var_val = np.random.random_sample(scale_shape).astype(np.float32)
    epsilon = 0.001
    with self.test_session() as sess, self.test_scope():
      grad = array_ops.placeholder(np.float32, shape=x_shape, name="grad")
      x = array_ops.placeholder(np.float32, shape=x_shape, name="x")
      mean = array_ops.placeholder(np.float32, shape=scale_shape, name="mean")
      var = array_ops.placeholder(np.float32, shape=scale_shape, name="var")
      scale = array_ops.placeholder(
          np.float32, shape=scale_shape, name="scale")
      # Pass epsilon explicitly: the op's default (1e-4) differs from the
      # 0.001 used by the reference below; previously epsilon was unused here.
      grad_x, grad_scale, grad_offset, _, _ = gen_nn_ops.fused_batch_norm_grad(
          grad, x, scale, mean, var, epsilon=epsilon, data_format="NHWC")
      grad_x_val, grad_scale_val, grad_offset_val = sess.run(
          [grad_x, grad_scale, grad_offset], {
              grad: grad_val,
              x: x_val,
              mean: mean_val,
              var: var_val,
              scale: scale_val
          })
      grad_x_ref, grad_scale_ref, grad_offset_ref = self._reference_grad(
          x_val, grad_val, scale_val, mean_val, var_val, epsilon, "NHWC")
      self.assertAllClose(grad_x_val, grad_x_ref, atol=1e-2)
      self.assertAllClose(grad_scale_val, grad_scale_ref, atol=1e-2)
      self.assertAllClose(grad_offset_val, grad_offset_ref, atol=1e-3)
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
|
ace.define("ace/ext/menu_tools/element_generator",["require","exports","module"], function(require, exports, module) {
'use strict';
/* Small factory helpers that build the DOM elements used by the settings
   menu. Each helper returns a detached element ready to be appended. */

/* Apply a list of [name, value] pairs to an element via setAttribute. */
function setAttributes(el, pairs) {
    pairs.forEach(function(pair) {
        el.setAttribute(pair[0], pair[1]);
    });
}
/* Build an <option>; 'selected' goes through setAttribute, every other own
   key of `obj` is assigned as a property. */
module.exports.createOption = function createOption (obj) {
    var el = document.createElement('option');
    Object.keys(obj).forEach(function(key) {
        if (key === 'selected') {
            el.setAttribute(key, obj[key]);
        } else {
            el[key] = obj[key];
        }
    });
    return el;
};
/* Build a checkbox input; `checked` doubles as the value attribute. */
module.exports.createCheckbox = function createCheckbox (id, checked, clss) {
    var el = document.createElement('input');
    setAttributes(el, [
        ['type', 'checkbox'],
        ['id', id],
        ['name', id],
        ['value', checked],
        ['class', clss]
    ]);
    if (checked) {
        el.setAttribute('checked', 'checked');
    }
    return el;
};
/* Build a plain text input pre-filled with `value`. */
module.exports.createInput = function createInput (id, value, clss) {
    var el = document.createElement('input');
    setAttributes(el, [
        ['type', 'text'],
        ['id', id],
        ['name', id],
        ['value', value],
        ['class', clss]
    ]);
    return el;
};
/* Build a <label> bound to the control with id `labelFor`. */
module.exports.createLabel = function createLabel (text, labelFor) {
    var el = document.createElement('label');
    el.setAttribute('for', labelFor);
    el.textContent = text;
    return el;
};
/* Build a <select> populated with one option per entry in `values`. */
module.exports.createSelection = function createSelection (id, values, clss) {
    var el = document.createElement('select');
    setAttributes(el, [
        ['id', id],
        ['name', id],
        ['class', clss]
    ]);
    values.forEach(function(item) {
        el.appendChild(module.exports.createOption(item));
    });
    return el;
};
});
ace.define("ace/ext/modelist",["require","exports","module"], function(require, exports, module) {
"use strict";
// Registry of all syntax-highlighting modes ace knows about, plus a lookup
// from a file path to the best-matching mode.
var modes = [];
// Return the Mode whose extension regex matches the file name of `path`;
// falls back to plain text when nothing matches. First match wins, so the
// order of `supportedModes` below matters (e.g. Django never matches by
// extension because HTML claims "html" first).
function getModeForPath(path) {
    var mode = modesByName.text;
    var fileName = path.split(/[\/\\]/).pop();
    for (var i = 0; i < modes.length; i++) {
        if (modes[i].supportsFile(fileName)) {
            mode = modes[i];
            break;
        }
    }
    return mode;
}
// One selectable mode. `extensions` is a |-separated list; entries prefixed
// with ^ match whole file names (e.g. ^Makefile), the rest match extensions.
var Mode = function(name, caption, extensions) {
    this.name = name;
    this.caption = caption;
    this.mode = "ace/mode/" + name;
    this.extensions = extensions;
    // Build extRe: either a mixed whole-name/extension alternation, or a
    // simple "ends with .ext" pattern when no ^ markers are present.
    if (/\^/.test(extensions)) {
        var re = extensions.replace(/\|(\^)?/g, function(a, b){
            return "$|" + (b ? "^" : "^.*\\.");
        }) + "$";
    } else {
        var re = "^.*\\.(" + extensions + ")$";
    }
    this.extRe = new RegExp(re, "gi");
};
// Returns a truthy match array when `filename` belongs to this mode.
Mode.prototype.supportsFile = function(filename) {
    return filename.match(this.extRe);
};
// Map of display-name -> [extension spec]; order defines match precedence.
var supportedModes = {
    ABAP: ["abap"],
    ABC: ["abc"],
    ActionScript:["as"],
    ADA: ["ada|adb"],
    Apache_Conf: ["^htaccess|^htgroups|^htpasswd|^conf|htaccess|htgroups|htpasswd"],
    AsciiDoc: ["asciidoc|adoc"],
    Assembly_x86:["asm|a"],
    AutoHotKey: ["ahk"],
    BatchFile: ["bat|cmd"],
    C_Cpp: ["cpp|c|cc|cxx|h|hh|hpp|ino"],
    C9Search: ["c9search_results"],
    Cirru: ["cirru|cr"],
    Clojure: ["clj|cljs"],
    Cobol: ["CBL|COB"],
    coffee: ["coffee|cf|cson|^Cakefile"],
    ColdFusion: ["cfm"],
    CSharp: ["cs"],
    CSS: ["css"],
    Curly: ["curly"],
    D: ["d|di"],
    Dart: ["dart"],
    Diff: ["diff|patch"],
    Dockerfile: ["^Dockerfile"],
    Dot: ["dot"],
    Dummy: ["dummy"],
    DummySyntax: ["dummy"],
    Eiffel: ["e|ge"],
    EJS: ["ejs"],
    Elixir: ["ex|exs"],
    Elm: ["elm"],
    Erlang: ["erl|hrl"],
    Forth: ["frt|fs|ldr"],
    FTL: ["ftl"],
    Gcode: ["gcode"],
    Gherkin: ["feature"],
    Gitignore: ["^.gitignore"],
    Glsl: ["glsl|frag|vert"],
    golang: ["go"],
    Groovy: ["groovy"],
    HAML: ["haml"],
    Handlebars: ["hbs|handlebars|tpl|mustache"],
    Haskell: ["hs"],
    haXe: ["hx"],
    HTML: ["html|htm|xhtml"],
    HTML_Ruby: ["erb|rhtml|html.erb"],
    INI: ["ini|conf|cfg|prefs"],
    Io: ["io"],
    Jack: ["jack"],
    Jade: ["jade"],
    Java: ["java"],
    JavaScript: ["js|jsm"],
    JSON: ["json"],
    JSONiq: ["jq"],
    JSP: ["jsp"],
    JSX: ["jsx"],
    Julia: ["jl"],
    LaTeX: ["tex|latex|ltx|bib"],
    Lean: ["lean|hlean"],
    LESS: ["less"],
    Liquid: ["liquid"],
    Lisp: ["lisp"],
    LiveScript: ["ls"],
    LogiQL: ["logic|lql"],
    LSL: ["lsl"],
    Lua: ["lua"],
    LuaPage: ["lp"],
    Lucene: ["lucene"],
    Makefile: ["^Makefile|^GNUmakefile|^makefile|^OCamlMakefile|make"],
    Markdown: ["md|markdown"],
    Mask: ["mask"],
    MATLAB: ["matlab"],
    Maze: ["mz"],
    MEL: ["mel"],
    MUSHCode: ["mc|mush"],
    MySQL: ["mysql"],
    Nix: ["nix"],
    ObjectiveC: ["m|mm"],
    OCaml: ["ml|mli"],
    Pascal: ["pas|p"],
    Perl: ["pl|pm"],
    pgSQL: ["pgsql"],
    PHP: ["php|phtml|shtml|php3|php4|php5|phps|phpt|aw|ctp"],
    Powershell: ["ps1"],
    Praat: ["praat|praatscript|psc|proc"],
    Prolog: ["plg|prolog"],
    Properties: ["properties"],
    Protobuf: ["proto"],
    Python: ["py"],
    R: ["r"],
    RDoc: ["Rd"],
    RHTML: ["Rhtml"],
    Ruby: ["rb|ru|gemspec|rake|^Guardfile|^Rakefile|^Gemfile"],
    Rust: ["rs"],
    SASS: ["sass"],
    SCAD: ["scad"],
    Scala: ["scala"],
    Scheme: ["scm|sm|rkt|oak|scheme"],
    SCSS: ["scss"],
    SH: ["sh|bash|^.bashrc"],
    SJS: ["sjs"],
    Smarty: ["smarty|tpl"],
    snippets: ["snippets"],
    Soy_Template:["soy"],
    Space: ["space"],
    SQL: ["sql"],
    SQLServer: ["sqlserver"],
    Stylus: ["styl|stylus"],
    SVG: ["svg"],
    Swift: ["swift"],
    Tcl: ["tcl"],
    Tex: ["tex"],
    Text: ["txt"],
    Textile: ["textile"],
    Toml: ["toml"],
    Twig: ["twig|swig"],
    Typescript: ["ts|typescript|str"],
    Vala: ["vala"],
    VBScript: ["vbs|vb"],
    Velocity: ["vm"],
    Verilog: ["v|vh|sv|svh"],
    VHDL: ["vhd|vhdl"],
    XML: ["xml|rdf|rss|wsdl|xslt|atom|mathml|mml|xul|xbl|xaml"],
    XQuery: ["xq"],
    YAML: ["yaml|yml"],
    Django: ["html"]
};
// Human-friendly captions for keys whose identifier form isn't presentable.
var nameOverrides = {
    ObjectiveC: "Objective-C",
    CSharp: "C#",
    golang: "Go",
    C_Cpp: "C and C++",
    coffee: "CoffeeScript",
    HTML_Ruby: "HTML (Ruby)",
    FTL: "FreeMarker"
};
// Build the Mode instances; keyed by lower-cased name in modesByName.
var modesByName = {};
for (var name in supportedModes) {
    var data = supportedModes[name];
    var displayName = (nameOverrides[name] || name).replace(/_/g, " ");
    var filename = name.toLowerCase();
    var mode = new Mode(filename, displayName, data[0]);
    modesByName[filename] = mode;
    modes.push(mode);
}
module.exports = {
    getModeForPath: getModeForPath,
    modes: modes,
    modesByName: modesByName
};
});
ace.define("ace/ext/themelist",["require","exports","module","ace/lib/fixoldbrowsers"], function(require, exports, module) {
"use strict";
// fixoldbrowsers polyfills the ES5 array/string methods used below.
require("ace/lib/fixoldbrowsers");
// Each row is [caption, name?, brightness?]. When name is omitted it is
// derived from the caption (lower-cased, spaces -> underscores); brightness
// other than "dark" is treated as light.
var themeData = [
    ["Chrome" ],
    ["Clouds" ],
    ["Crimson Editor" ],
    ["Dawn" ],
    ["Dreamweaver" ],
    ["Eclipse" ],
    ["GitHub" ],
    ["IPlastic" ],
    ["Solarized Light"],
    ["TextMate" ],
    ["Tomorrow" ],
    ["XCode" ],
    ["Kuroir"],
    ["KatzenMilch"],
    ["SQL Server" ,"sqlserver" , "light"],
    ["Ambiance" ,"ambiance" , "dark"],
    ["Chaos" ,"chaos" , "dark"],
    ["Clouds Midnight" ,"clouds_midnight" , "dark"],
    ["Cobalt" ,"cobalt" , "dark"],
    ["idle Fingers" ,"idle_fingers" , "dark"],
    ["krTheme" ,"kr_theme" , "dark"],
    ["Merbivore" ,"merbivore" , "dark"],
    ["Merbivore Soft" ,"merbivore_soft" , "dark"],
    ["Mono Industrial" ,"mono_industrial" , "dark"],
    ["Monokai" ,"monokai" , "dark"],
    ["Pastel on dark" ,"pastel_on_dark" , "dark"],
    ["Solarized Dark" ,"solarized_dark" , "dark"],
    ["Terminal" ,"terminal" , "dark"],
    ["Tomorrow Night" ,"tomorrow_night" , "dark"],
    ["Tomorrow Night Blue" ,"tomorrow_night_blue" , "dark"],
    ["Tomorrow Night Bright","tomorrow_night_bright" , "dark"],
    ["Tomorrow Night 80s" ,"tomorrow_night_eighties" , "dark"],
    ["Twilight" ,"twilight" , "dark"],
    ["Vibrant Ink" ,"vibrant_ink" , "dark"]
];
// Expose both a by-name index and an ordered list of theme descriptors
// ({ caption, theme: "ace/theme/<name>", isDark, name }).
exports.themesByName = {};
exports.themes = themeData.map(function(data) {
    var name = data[1] || data[0].replace(/ /g, "_").toLowerCase();
    var theme = {
        caption: data[0],
        theme: "ace/theme/" + name,
        isDark: data[2] == "dark",
        name: name
    };
    exports.themesByName[name] = theme;
    return theme;
});
});
ace.define("ace/ext/menu_tools/add_editor_menu_options",["require","exports","module","ace/ext/modelist","ace/ext/themelist"], function(require, exports, module) {
'use strict';
/* Attach `editor.menuOptions`: the fixed choice lists for the settings
   menu (newline mode, keyboard handler) plus the theme and mode lists
   derived from the themelist/modelist registries. */
module.exports.addEditorMenuOptions = function addEditorMenuOptions (editor) {
    var modelist = require('../modelist');
    var themelist = require('../themelist');
    var newLineModes = [{
        textContent: "unix",
        value: "unix"
    }, {
        textContent: "windows",
        value: "windows"
    }, {
        textContent: "auto",
        value: "auto"
    }];
    var keyboardHandlers = [{
        textContent: "ace",
        value: ""
    }, {
        textContent: "vim",
        value: "ace/keyboard/vim"
    }, {
        textContent: "emacs",
        value: "ace/keyboard/emacs"
    }, {
        textContent: "textarea",
        value: "ace/keyboard/textarea"
    }, {
        textContent: "sublime",
        value: "ace/keyboard/sublime"
    }];
    /* one <option> descriptor per registered theme */
    var themeOptions = themelist.themes.map(function(theme) {
        return {
            textContent: theme.caption,
            value: theme.theme
        };
    });
    /* one <option> descriptor per registered syntax mode */
    var modeOptions = modelist.modes.map(function(mode) {
        return {
            textContent: mode.name,
            value: mode.mode
        };
    });
    editor.menuOptions = {
        setNewLineMode: newLineModes,
        setTheme: themeOptions,
        setMode: modeOptions,
        setKeyboardHandler: keyboardHandlers
    };
};
});
ace.define("ace/ext/menu_tools/get_set_functions",["require","exports","module"], function(require, exports, module) {
'use strict';
/* Collect every public set* function reachable from the editor, its
   session and its renderer (renderer first, so duplicates resolve to the
   renderer's version), skipping a blacklist of setters that make no sense
   in a settings menu. Returns [{ functionName, parentObj, parentName }]. */
module.exports.getSetFunctions = function getSetFunctions (editor) {
    var results = [];
    var seen = [];
    var sources = {
        'editor' : editor,
        'session' : editor.session,
        'renderer' : editor.renderer
    };
    var blacklist = [
        'setOption',
        'setUndoManager',
        'setDocument',
        'setValue',
        'setBreakpoints',
        'setScrollTop',
        'setScrollLeft',
        'setSelectionStyle',
        'setWrapLimitRange'
    ];
    ['renderer', 'session', 'editor'].forEach(function(sourceName) {
        var source = sources[sourceName];
        for (var prop in source) {
            if (blacklist.indexOf(prop) !== -1) {
                continue;
            }
            /* only setters, and only the first object that provides one */
            if (!/^set/.test(prop) || seen.indexOf(prop) !== -1) {
                continue;
            }
            seen.push(prop);
            results.push({
                'functionName' : prop,
                'parentObj' : source,
                'parentName' : sourceName
            });
        }
    });
    return results;
};
});
ace.define("ace/ext/menu_tools/generate_settings_menu",["require","exports","module","ace/ext/menu_tools/element_generator","ace/ext/menu_tools/add_editor_menu_options","ace/ext/menu_tools/get_set_functions"], function(require, exports, module) {
'use strict';
var egen = require('./element_generator');
var addEditorMenuOptions = require('./add_editor_menu_options').addEditorMenuOptions;
var getSetFunctions = require('./get_set_functions').getSetFunctions;
// Build the complete settings-menu DOM for `editor`: one row per set*
// function, rendered as a select, checkbox or text input depending on the
// current value's type. Returns the top-level container element.
module.exports.generateSettingsMenu = function generateSettingsMenu (editor) {
    var elements = [];
    // Sort rows alphabetically by the option name stored in 'contains'.
    function cleanupElementsList() {
        elements.sort(function(a, b) {
            var x = a.getAttribute('contains');
            var y = b.getAttribute('contains');
            return x.localeCompare(y);
        });
    }
    // Wrap all rows in the #ace_settingsmenu container and append a
    // version footer.
    function wrapElements() {
        var topmenu = document.createElement('div');
        topmenu.setAttribute('id', 'ace_settingsmenu');
        elements.forEach(function(element) {
            topmenu.appendChild(element);
        });
        var el = topmenu.appendChild(document.createElement('div'));
        var version = "1.2.0";
        el.style.padding = "1em";
        el.textContent = "Ace version " + version;
        return topmenu;
    }
    // Build one menu row: a label plus a control whose change handler calls
    // obj[item](newValue). `val` decides the control type: array -> select,
    // boolean -> checkbox, anything else -> text input (with 'true'/'false'
    // strings coerced to booleans).
    function createNewEntry(obj, clss, item, val) {
        var el;
        var div = document.createElement('div');
        div.setAttribute('contains', item);
        div.setAttribute('class', 'ace_optionsMenuEntry');
        div.setAttribute('style', 'clear: both;');
        // Label text: "setFontSize" -> "Font Size".
        div.appendChild(egen.createLabel(
            item.replace(/^set/, '').replace(/([A-Z])/g, ' $1').trim(),
            item
        ));
        if (Array.isArray(val)) {
            el = egen.createSelection(item, val, clss);
            el.addEventListener('change', function(e) {
                try{
                    // Drop the stale 'selected' marker from all other options.
                    editor.menuOptions[e.target.id].forEach(function(x) {
                        if(x.textContent !== e.target.textContent) {
                            delete x.selected;
                        }
                    });
                    obj[e.target.id](e.target.value);
                } catch (err) {
                    throw new Error(err);
                }
            });
        } else if(typeof val === 'boolean') {
            el = egen.createCheckbox(item, val, clss);
            el.addEventListener('change', function(e) {
                try{
                    obj[e.target.id](!!e.target.checked);
                } catch (err) {
                    throw new Error(err);
                }
            });
        } else {
            el = egen.createInput(item, val, clss);
            el.addEventListener('change', function(e) {
                try{
                    // Coerce the literal strings 'true'/'false' to booleans.
                    if(e.target.value === 'true') {
                        obj[e.target.id](true);
                    } else if(e.target.value === 'false') {
                        obj[e.target.id](false);
                    } else {
                        obj[e.target.id](e.target.value);
                    }
                } catch (err) {
                    throw new Error(err);
                }
            });
        }
        el.style.cssText = 'float:right;';
        div.appendChild(el);
        return div;
    }
    // Build a select row whose choices come from editor.menuOptions[item],
    // pre-selecting the editor's current value (objects compared by $id).
    function makeDropdown(item, esr, clss, fn) {
        var val = editor.menuOptions[item];
        var currentVal = esr[fn]();
        if (typeof currentVal == 'object')
            currentVal = currentVal.$id;
        val.forEach(function(valuex) {
            if (valuex.value === currentVal)
                valuex.selected = 'selected';
        });
        return createNewEntry(esr, clss, item, val);
    }
    // For one set* function: use a dropdown if menuOptions lists choices,
    // otherwise read the current value via the matching get* function.
    // Setters without a readable getter are silently skipped.
    function handleSet(setObj) {
        var item = setObj.functionName;
        var esr = setObj.parentObj;
        var clss = setObj.parentName;
        var val;
        var fn = item.replace(/^set/, 'get');
        if(editor.menuOptions[item] !== undefined) {
            elements.push(makeDropdown(item, esr, clss, fn));
        } else if(typeof esr[fn] === 'function') {
            try {
                val = esr[fn]();
                if(typeof val === 'object') {
                    val = val.$id;
                }
                elements.push(
                    createNewEntry(esr, clss, item, val)
                );
            } catch (e) {
            }
        }
    }
    addEditorMenuOptions(editor);
    getSetFunctions(editor).forEach(function(setObj) {
        handleSet(setObj);
    });
    cleanupElementsList();
    return wrapElements();
};
});
ace.define("ace/ext/menu_tools/overlay_page",["require","exports","module","ace/lib/dom"], function(require, exports, module) {
'use strict';
var dom = require("../../lib/dom");
// Styles for the settings/shortcut overlays. NOTE(review): the second
// .ace_closeButton rule simply overrides the first's background; it looks
// like it was meant to be .ace_closeButton:hover — confirm upstream.
var cssText = "#ace_settingsmenu, #kbshortcutmenu {\
background-color: #F7F7F7;\
color: black;\
box-shadow: -5px 4px 5px rgba(126, 126, 126, 0.55);\
padding: 1em 0.5em 2em 1em;\
overflow: auto;\
position: absolute;\
margin: 0;\
bottom: 0;\
right: 0;\
top: 0;\
z-index: 9991;\
cursor: default;\
}\
.ace_dark #ace_settingsmenu, .ace_dark #kbshortcutmenu {\
box-shadow: -20px 10px 25px rgba(126, 126, 126, 0.25);\
background-color: rgba(255, 255, 255, 0.6);\
color: black;\
}\
.ace_optionsMenuEntry:hover {\
background-color: rgba(100, 100, 100, 0.1);\
-webkit-transition: all 0.5s;\
transition: all 0.3s\
}\
.ace_closeButton {\
background: rgba(245, 146, 146, 0.5);\
border: 1px solid #F48A8A;\
border-radius: 50%;\
padding: 7px;\
position: absolute;\
right: -8px;\
top: -8px;\
z-index: 1000;\
}\
.ace_closeButton{\
background: rgba(245, 146, 146, 0.9);\
}\
.ace_optionsMenuKey {\
color: darkslateblue;\
font-weight: bold;\
}\
.ace_optionsMenuCommand {\
color: darkcyan;\
font-weight: normal;\
}";
dom.importCssString(cssText);
// Show `contentElement` inside a modal overlay anchored at the given CSS
// offsets (any of top/right/bottom/left may be falsy to omit it). The
// overlay closes on backdrop click, close-button click, or Escape, and
// returns focus to the editor.
module.exports.overlayPage = function overlayPage(editor, contentElement, top, right, bottom, left) {
    top = top ? 'top: ' + top + ';' : '';
    bottom = bottom ? 'bottom: ' + bottom + ';' : '';
    right = right ? 'right: ' + right + ';' : '';
    left = left ? 'left: ' + left + ';' : '';
    var closer = document.createElement('div');
    var contentContainer = document.createElement('div');
    // Escape key closes the overlay by simulating a backdrop click.
    function documentEscListener(e) {
        if (e.keyCode === 27) {
            closer.click();
        }
    }
    closer.style.cssText = 'margin: 0; padding: 0; ' +
        'position: fixed; top:0; bottom:0; left:0; right:0;' +
        'z-index: 9990; ' +
        'background-color: rgba(0, 0, 0, 0.3);';
    closer.addEventListener('click', function() {
        // Tear down: unhook the Escape listener, remove the overlay DOM and
        // hand focus back to the editor.
        document.removeEventListener('keydown', documentEscListener);
        closer.parentNode.removeChild(closer);
        editor.focus();
        closer = null;
    });
    document.addEventListener('keydown', documentEscListener);
    contentContainer.style.cssText = top + right + bottom + left;
    // Clicks inside the content must not bubble up to the closing backdrop.
    contentContainer.addEventListener('click', function(e) {
        e.stopPropagation();
    });
    var wrapper = dom.createElement("div");
    wrapper.style.position = "relative";
    var closeButton = dom.createElement("div");
    closeButton.className = "ace_closeButton";
    closeButton.addEventListener('click', function() {
        closer.click();
    });
    wrapper.appendChild(closeButton);
    contentContainer.appendChild(wrapper);
    contentContainer.appendChild(contentElement);
    closer.appendChild(contentContainer);
    document.body.appendChild(closer);
    editor.blur();
};
});
ace.define("ace/ext/settings_menu",["require","exports","module","ace/ext/menu_tools/generate_settings_menu","ace/ext/menu_tools/overlay_page","ace/editor"], function(require, exports, module) {
"use strict";
var generateSettingsMenu = require('./menu_tools/generate_settings_menu').generateSettingsMenu;
var overlayPage = require('./menu_tools/overlay_page').overlayPage;
/* Open the settings overlay for `editor`, unless one is already on screen. */
function showSettingsMenu(editor) {
    var existing = document.getElementById('ace_settingsmenu');
    if (existing) {
        return;
    }
    overlayPage(editor, generateSettingsMenu(editor), '0', '0', '0');
}
/* Extension entry point: adds Editor.prototype.showSettingsMenu so any
   editor instance can pop up the settings overlay. */
module.exports.init = function(editor) {
    var Editor = require("ace/editor").Editor;
    Editor.prototype.showSettingsMenu = function() {
        showSettingsMenu(this);
    };
};
});
// Eagerly load the extension so ace registers it at script-parse time.
(function() {
    ace.require(["ace/ext/settings_menu"], function() {});
})();
|
import mything.microfrontends.hello
import mything.microfrontends.counter
|
from mc.utils.mc_sandbox import McSandbox
def main():
    """Drive an example flow through an in-process MC sandbox.

    Registers a flow record built from the generated spec, then alternates
    between ticking the flow runner and draining/executing queued jobs until
    the sandbox reports no incomplete items remain.
    """
    sandbox = McSandbox()
    sandbox.flow_record_client.create_flow_record_from_flow_spec(
        flow_spec=generate_flow_spec())
    while sandbox.has_incomplete_items():
        sandbox.flow_runner.tick()
        queue_key = sandbox.queues['job']['key']
        claimed = sandbox.mc_db.claim_queue_items(queue_key=queue_key)['items']
        for claimed_job in claimed:
            run_job(job=claimed_job, mc_db=sandbox.mc_db)
    print("No more flows to claim.")
def generate_flow_spec():
    """Return a spec for a flow with three independent job tasks.

    Each task hangs directly off ROOT and carries a distinct job_type
    ('job.0' .. 'job.2') in its task params.
    """
    tasks = [
        {
            'key': 'job_{job_type}_task'.format(job_type='job.%s' % index),
            'task_type': 'mc.tasks.job',
            'task_params': {'job_type': 'job.%s' % index},
            'precursors': ['ROOT'],
        }
        for index in range(3)
    ]
    return {'label': 'example_flow', 'tasks': tasks}
def run_job(job=None, mc_db=None):
    """Execute one claimed job: mark it COMPLETED and persist the result.

    Mutates *job* in place (sets its status) and writes the data/status
    patch back through *mc_db*.
    """
    print("running job '{job_type}'".format(job_type=job['job_type']))
    job['status'] = 'COMPLETED'
    patches = {
        'data': job['data'],
        'status': job['status'],
    }
    mc_db.patch_item(item_type='job', key=job['key'], patches=patches)
# Allow the sandbox demo to be run as a script.
if __name__ == '__main__':
    main()
|
#include <gwrom.h>
#include <string.h>
#include <errno.h>
/* internal flags (1 << 16 to 1 << 23) */
#define GWROM_FREE_DATA ( 1 << 16 )
/******************************************************************************
zlib
******************************************************************************/
#ifdef GWROM_USE_GZIP
#include <zlib.h>
/* zlib allocation hook: routes zlib's internal allocations through the
   gwrom allocator so the host controls all memory; `opaque` is unused. */
void* zalloc( void* opaque, uInt items, uInt size )
{
  (void)opaque;
  return gwrom_malloc( items * size );
}
/* zlib deallocation hook, counterpart of zalloc; `opaque` is unused. */
void zfree( void* opaque, void* addr )
{
  (void)opaque;
  gwrom_free( addr );
}
/* Returns GWROM_OK when `data` looks like a gzip stream (deflate method).
   Buffers smaller than a minimal gzip stream are rejected up front so the
   magic-byte check here and the ISIZE trailer read in decompress_gzip can
   never touch memory out of bounds. */
static int identify_gzip( const void* data, size_t size )
{
  const uint8_t* magic = (const uint8_t*)data;
  /* smallest valid gzip stream: 10-byte header + 8-byte CRC32/ISIZE trailer */
  if ( size < 18 )
  {
    return GWROM_INVALID_ROM;
  }
  /* basic header check: magic 0x1f 0x8b, compression method 8 (deflate) */
  if ( magic[ 0 ] != 0x1f || magic[ 1 ] != 0x8b || magic[ 2 ] != 8 )
  {
    return GWROM_INVALID_ROM;
  }
  /* TODO check the signature at the end of the data */
  return GWROM_OK;
}
/* Inflates a gzip stream into a freshly gwrom_malloc'ed buffer. On success
   *new_data/*new_size describe the buffer; on failure nothing is left
   allocated. Returns GWROM_OK, GWROM_NO_MEMORY or GWROM_INVALID_ROM.
   NOTE(review): zlib's uncompress() parses zlib (RFC 1950) framing, not the
   gzip wrapper; gzip data normally needs inflateInit2 with
   windowBits = 16 + MAX_WBITS — confirm against the build's zlib usage. */
static int decompress_gzip( void** new_data, size_t* new_size, void* data, size_t size )
{
  /* guard so the 4-byte trailer read below cannot underflow the buffer
     (identify_gzip enforces this too, but don't rely on call order) */
  if ( size < 4 )
  {
    return GWROM_INVALID_ROM;
  }
  /* find the decompressed size: gzip stores ISIZE (uncompressed length
     modulo 2^32) in the last four bytes; memcpy avoids the unaligned read
     the previous pointer cast performed */
  /* TODO is this always little-endian in a gzipped file? (per RFC 1952 yes,
     so big-endian hosts would still need a byte swap here) */
  uint32_t isize;
  memcpy( &isize, (const char*)data + size - 4, sizeof( isize ) );
  *new_size = isize;
  /* allocate buffer */
  *new_data = gwrom_malloc( *new_size );
  if ( !*new_data )
  {
    return GWROM_NO_MEMORY;
  }
  /* decompress */
  uLong dest_len = *new_size;
  int res = uncompress( *new_data, &dest_len, data, size );
  if ( res != Z_OK )
  {
    gwrom_free( *new_data );
    return GWROM_INVALID_ROM;
  }
  return GWROM_OK;
}
#endif /* GWROM_USE_GZIP */
/******************************************************************************
bzip2
******************************************************************************/
#ifdef GWROM_USE_BZIP2
#include <bzlib.h>
#include <bzlib_private.h>
/* needed because of -DBZ_NO_STDIO, which should be defined for compilation;
   bzlib calls this on internal assertion failures — intentionally a no-op */
void bz_internal_error( int errcode )
{
  (void)errcode;
}
/* bzlib allocation hook: routes bzlib's internal allocations through the
   gwrom allocator; `opaque` is unused. */
void* bzalloc( void* opaque, Int32 items, Int32 size )
{
  (void)opaque;
  return gwrom_malloc( items * size );
}
/* bzlib deallocation hook, counterpart of bzalloc; `opaque` is unused. */
void bzfree( void* opaque, void* addr )
{
  (void)opaque;
  gwrom_free( addr );
}
/* Returns GWROM_OK when `data` starts with a bzip2 header: "BZh" followed
   by a block-size digit. (The check accepts '0' even though bzip2 block
   sizes are 1..9 — kept as-is for compatibility with the previous logic.) */
static int identify_bzip2( const void* data, size_t size )
{
  const char* magic = (const char*)data;
  /* reject buffers too small to contain the 4-byte magic, so the reads
     below can never go out of bounds */
  if ( size < 4 )
  {
    return GWROM_INVALID_ROM;
  }
  /* basic header check */
  if ( magic[ 0 ] != 'B' || magic[ 1 ] != 'Z' || magic[ 2 ] != 'h' ||
       magic[ 3 ] < '0' || magic[ 3 ] > '9' )
  {
    return GWROM_INVALID_ROM;
  }
  /* TODO check the signature at the end of the data */
  return GWROM_OK;
}
static int decompress_bzip2( void** new_data, size_t* new_size, void* data, size_t size )
{
bz_stream stream;
/* setup the decompression stream */
stream.bzalloc = bzalloc;
stream.bzfree = bzfree;
int res = BZ2_bzDecompressInit( &stream, 0, 0 );
if ( res != BZ_OK )
{
return GWROM_INVALID_ROM;
}
stream.next_in = (char*)data;
stream.avail_in = (unsigned)size;
#ifdef GWROM_NO_REALLOC
/* first decompression run: evaluate size of decompressed data */
for ( ;; )
{
char buffer[ GWROM_DECOMP_BUFFER ];
stream.next_out = buffer;
stream.avail_out = sizeof( buffer );
res = BZ2_bzDecompress( &stream );
if ( res == BZ_STREAM_END )
{
break;
}
if ( res != BZ_OK )
{
BZ2_bzDecompressEnd( &stream );
return GWROM_INVALID_ROM;
}
}
/* basic check for when size_t can't hold 64-bit values */
if ( sizeof( size_t ) > 4 )
{
*new_size = stream.total_out_hi32;
*new_size = *new_size << 32 | stream.total_out_lo32;
}
else
{
if ( stream.total_out_hi32 != 0 )
{
BZ2_bzDecompressEnd( &stream );
return GWROM_NO_MEMORY;
}
*new_size = stream.total_out_lo32;
}
BZ2_bzDecompressEnd( &stream );
*new_data = gwrom_malloc( *new_size );
if ( !*new_data )
{
return GWROM_NO_MEMORY;
}
/* second decompression run: decompress data to the allocated buffer */
unsigned dest_len = *new_size;
res = BZ2_bzBuffToBuffDecompress( (char*)*new_data, &dest_len, (char*)data, size, 0, 0 );
if ( res != BZ_OK )
{
return GWROM_INVALID_ROM;
}
#else
*new_data = NULL;
*new_size = 0;
/* decompress while reallocating the decompressed data as necessary */
for ( ;; )
{
char buffer[ GWROM_DECOMP_BUFFER ];
stream.next_out = buffer;
stream.avail_out = sizeof( buffer );
res = BZ2_bzDecompress( &stream );
if ( res != BZ_OK && res != BZ_STREAM_END )
{
BZ2_bzDecompressEnd( &stream );
gwrom_free( *new_data );
return GWROM_INVALID_ROM;
}
size_t count = sizeof( buffer ) - stream.avail_out;
if ( count )
{
char* realloc_data = gwrom_realloc( *new_data, *new_size + count );
if ( realloc_data == NULL )
{
gwrom_free( *new_data );
return GWROM_NO_MEMORY;
}
*new_data = realloc_data;
memcpy( (void*)( (char*)*new_data + *new_size ), (void*)buffer, count );
*new_size += count;
}
if ( res == BZ_STREAM_END )
{
break;
}
}
BZ2_bzDecompressEnd( &stream );
#endif
return GWROM_OK;
}
/* use the default destroy method */
#define destroy_bzip2 default_destroy
/* bzip2 roms don't have any entries in them */
#define find_bzip2 default_find
#endif /* GWROM_USE_BZIP2 */
/******************************************************************************
uncompressed
******************************************************************************/
/* Fallback identifier: raw (uncompressed) data always matches, so this
   entry must stay last in the decompress[] table. */
static int identify_uncompressed( const void* data, size_t size )
{
  (void)data;
  (void)size;
  return GWROM_OK;
}
/* Pass-through "decompressor" for raw data: hands back the caller's own
   buffer unchanged. The caller must compare *new_data against data to tell
   whether the content was inflated into a fresh buffer and copy it itself
   if it needs an owned copy. */
static int inflate_uncompressed( void** new_data, size_t* new_size, void* data, size_t size )
{
  *new_size = size;
  *new_data = data;
  return GWROM_OK;
}
/******************************************************************************
decompress methods
******************************************************************************/
/* One supported decompression method: a cheap signature probe plus the
   actual decompressor. gwrom_init tries them in table order. */
typedef struct
{
  /* returns GWROM_OK if the decompression method is identified */
  int (*identify)( const void*, size_t );
  /* decompresses the rom into a new buffer */
  int (*decompress)( void**, size_t*, void*, size_t );
}
decompress_t;
/* all inflate algorithms must have an entry here; the uncompressed
   pass-through matches everything and therefore must stay last */
static const decompress_t decompress[] =
{
#ifdef GWROM_USE_GZIP
  { identify_gzip, decompress_gzip },
#endif
#ifdef GWROM_USE_BZIP2
  { identify_bzip2, decompress_bzip2 },
#endif
  /* add new inflate methods here */
  { identify_uncompressed, inflate_uncompressed },
};
/******************************************************************************
default rom methods
******************************************************************************/
/* Default init method for rom formats that need no setup; always succeeds. */
static int default_init( gwrom_t* gwrom )
{
  (void)gwrom;
  return GWROM_OK;
}
/* Default destroy method for rom formats that own no extra resources. */
static void default_destroy( gwrom_t* gwrom )
{
  (void)gwrom;
}
/* Default find method for rom formats without named entries: every lookup
   fails with GWROM_ENTRY_NOT_FOUND. */
static int default_find( gwrom_entry_t* file, gwrom_t* gwrom, const char* file_name )
{
  (void)file;
  (void)gwrom;
  (void)file_name;
  return GWROM_ENTRY_NOT_FOUND;
}
/******************************************************************************
tar archive, use --format=v7 with gnu tar
******************************************************************************/
#ifdef GWROM_USE_TAR_V7
/* One 512-byte v7 tar header block. The struct view maps the classic v7
   fields (numeric fields are NUL-terminated octal strings); the union's
   fill[] pads the header to exactly one tar block. The linked[] area is
   repurposed here to carry per-entry user scratch fields. */
typedef union
{
  struct
  {
    char name[ 100 ];
    char mode[ 8 ];
    char owner[ 8 ];
    char group[ 8 ];
    char size[ 12 ];
    char modification[ 12 ];
    char checksum[ 8 ];
    char type;
    char linked[ 100 ];
    /*
    a space for the user to store things related to the entry, i.e. data has
    been converted to little endian
    CAUTION: things stored in user_flags are *not* persistent!
    */
    uint32_t user_flags;
    /*
    a space for the user to store things related to the entry, i.e. a different
    representation of the data
    CAUTION: things stored in user_data are *not* persistent!
    */
    void* user_data;
  } s;
  char fill[ 512 ];
}
entry_tar_v7;
/* Returns GWROM_OK when `data` looks like a v7 tar archive: total size a
   multiple of 512, every entry with a sane octal size and printable name,
   and the final entry followed by at least one all-zero terminator block. */
static int identify_tar_v7( const void* data, size_t size )
{
  /* tar size is always a multiple of 512 */
  if ( size & 511 )
  {
    return GWROM_INVALID_ROM;
  }
  entry_tar_v7* entry = (entry_tar_v7*)data;
  char* end = (char*)data + size - 512;
  /* iterate over the entries and do a basic check on each one of them */
  while ( (char*)entry <= end && entry->s.name[ 0 ] )
  {
    char* endptr;
    /* clear errno first: strtol only sets it on overflow, so a stale
       ERANGE left over from earlier code would wrongly reject the entry */
    errno = 0;
    long entry_size = strtol( entry->s.size, &endptr, 8 );
    /* Check for a valid, non-negative entry size.
       NOTE(review): this rejects space-terminated size fields; confirm that
       GNU tar --format=v7 always NUL-terminates them as assumed here. */
    if ( *endptr != 0 || errno == ERANGE || entry_size < 0 )
    {
      return GWROM_INVALID_ROM;
    }
    char* name = entry->s.name;
    char* endname = name + 100;
    /* Check for a valid entry name (printable ASCII only) */
    do
    {
      if ( *name++ < 32 )
      {
        return GWROM_INVALID_ROM;
      }
    }
    while ( *name && name < endname );
    /* go to the next entry: payload rounded up to 512-byte blocks, plus
       one block for the header itself */
    entry_size = ( entry_size + 511 ) / 512 + 1;
    entry += entry_size;
  }
  /* the last entry must be followed by one or more empty entries */
  if ( (char*)entry >= end )
  {
    return GWROM_INVALID_ROM;
  }
  /* check that every trailing block is entirely zero */
  do
  {
    int i;
    for ( i = 0; i < 512; i++ )
    {
      if ( ( (char*)entry )[ i ] != 0 )
      {
        return GWROM_INVALID_ROM;
      }
    }
    entry++;
  }
  while ( (char*)entry < end );
  return GWROM_OK;
}
/* Prepares a validated tar archive for use: clears the per-entry user
   scratch fields (which overlay the unused linked[] area) in every header. */
static int init_tar_v7( gwrom_t* gwrom )
{
  entry_tar_v7* entry = (entry_tar_v7*)gwrom->data;
  while ( entry->s.name[ 0 ] )
  {
    long entry_size = strtol( entry->s.size, NULL, 8 );
    /* zero user space */
    entry->s.user_flags = 0;
    entry->s.user_data = NULL;
    /* go to the next entry: payload rounded up to 512-byte blocks, plus
       one block for the header */
    entry_size = ( entry_size + 511 ) / 512 + 1;
    entry += entry_size;
  }
  return GWROM_OK;
}
/* Looks up `file_name` among the archive's entries. On a hit fills in
   `file` and returns GWROM_OK; otherwise returns GWROM_ENTRY_NOT_FOUND. */
static int find_tar_v7( gwrom_entry_t* file, gwrom_t* gwrom, const char* file_name )
{
  entry_tar_v7* entry = (entry_tar_v7*)gwrom->data;
  while ( entry->s.name[ 0 ] )
  {
    long entry_size = strtol( entry->s.size, NULL, 8 );
    if ( !strcmp( entry->s.name, file_name ) )
    {
      /* found the entry, fill in gwrom_entry_t* */
      file->name = entry->s.name;
      /* the payload starts right after the 512-byte header block */
      file->data = (void*)( entry + 1 );
      file->size = entry_size;
      file->user_flags = &entry->s.user_flags;
      /* also expose user_data, matching iterate_tar_v7; previously this
         field was left unset, handing callers an indeterminate pointer */
      file->user_data = &entry->s.user_data;
      return GWROM_OK;
    }
    /* go to the next entry: payload rounded up to 512-byte blocks, plus
       one block for the header */
    entry_size = ( entry_size + 511 ) / 512 + 1;
    entry += entry_size;
  }
  return GWROM_ENTRY_NOT_FOUND;
}
/* Calls `callback` once per archive entry, in file order, until the end of
   the archive or until the callback returns 0 to stop the iteration. The
   gwrom_entry_t passed to the callback is reused between calls. */
static void iterate_tar_v7( gwrom_t* gwrom, int (*callback)( gwrom_entry_t*, gwrom_t* ) )
{
  entry_tar_v7* entry = (entry_tar_v7*)gwrom->data;
  gwrom_entry_t file;
  while ( entry->s.name[ 0 ] )
  {
    long entry_size = strtol( entry->s.size, NULL, 8 );
    file.name = entry->s.name;
    /* the payload starts right after the 512-byte header block */
    file.data = (void*)( entry + 1 );
    file.size = entry_size;
    file.user_flags = &entry->s.user_flags;
    file.user_data = &entry->s.user_data;
    if ( !callback( &file, gwrom ) )
    {
      return;
    }
    /* go to the next entry: payload rounded up to 512-byte blocks, plus
       one block for the header */
    entry_size = ( entry_size + 511 ) / 512 + 1;
    entry += entry_size;
  }
}
/* tar doesn't need destruction */
#define destroy_tar_v7 default_destroy
#endif /* GWROM_USE_TAR_V7 */
/******************************************************************************
rom methods
******************************************************************************/
/* Virtual-method table: one instance per supported rom container format.
   gwrom_init probes each registered table in order and adopts the first
   format whose identify() accepts the data. */
typedef struct
{
/* returns GWROM_OK if the rom type is identified */
int (*identify)( const void*, size_t );
/* initializes the rom */
int (*init)( gwrom_t* );
/* frees all memory allocated to the rom */
void (*destroy)( gwrom_t* );
/* finds an entry in the rom */
int (*find)( gwrom_entry_t*, gwrom_t*, const char* );
/* iterates over all rom entries */
void (*iterate)( gwrom_t*, int (*)( gwrom_entry_t*, gwrom_t* ) );
}
methods_t;
/* all supported rom types must have an entry here */
/* Registry of container formats, probed in declaration order by gwrom_init. */
static const methods_t methods[] =
{
#ifdef GWROM_USE_TAR_V7
{ identify_tar_v7, init_tar_v7, destroy_tar_v7, find_tar_v7, iterate_tar_v7 },
#endif
/* add new rom types here */
};
/******************************************************************************
external API
******************************************************************************/
/*
 * Initializes a gwrom from a memory image.
 *
 * data/size: the raw (possibly compressed) rom image.
 * flags: GWROM_COPY_ALWAYS to force a private copy; GWROM_FREE_DATA is set
 *        internally whenever the library ends up owning the buffer.
 *
 * Returns GWROM_OK on success, GWROM_NO_MEMORY on allocation failure, or
 * GWROM_INVALID_ROM when no decompressor/format accepts the data.
 */
int gwrom_init( gwrom_t* gwrom, void* data, size_t size, uint32_t flags )
{
void* new_data = data;
size_t new_size = size;
unsigned i;
/* check for compressed roms first */
const decompress_t* decomp = decompress;
for ( i = 0; i < sizeof( decompress ) / sizeof( decompress[ 0 ] ); i++, decomp++ )
{
if ( decomp->identify( data, size ) == GWROM_OK )
{
if ( decomp->decompress( &new_data, &new_size, data, size ) != GWROM_OK )
{
return GWROM_INVALID_ROM;
}
/* check if data was copied into a new buffer */
if ( new_data != data )
{
/* yes, we own the decompressed buffer: free it on destroy */
flags |= GWROM_FREE_DATA;
}
else if ( flags & GWROM_COPY_ALWAYS )
{
/* caller asked for a private copy of the data */
new_data = gwrom_malloc( size );
if ( !new_data )
{
return GWROM_NO_MEMORY;
}
memcpy( new_data, data, size );
new_size = size;
/* set flags to free the data */
flags |= GWROM_FREE_DATA;
}
break;
}
}
/* iterates over the supported types */
const methods_t* meth = methods;
for ( i = 0; i < sizeof( methods ) / sizeof( methods[ 0 ] ); i++, meth++ )
{
if ( meth->identify( new_data, new_size ) == GWROM_OK )
{
/* type was identified, fill in gwrom and call its init method */
gwrom->data = new_data;
gwrom->size = new_size;
gwrom->flags = flags;
gwrom->user_data = NULL; /* bug fix: was left uninitialized */
gwrom->destroy = meth->destroy;
gwrom->find = meth->find;
gwrom->iterate = meth->iterate;
return meth->init( gwrom );
}
}
/* rom not identified: release any buffer allocated above.
   bug fix: the decompressed/copied buffer used to leak on this path. */
if ( new_data != data )
{
gwrom_free( new_data );
}
return GWROM_INVALID_ROM;
}
/*
 * Releases all resources owned by the rom and resets the handle.
 * Calls the type-specific destroy method, frees the data buffer when it is
 * owned (GWROM_FREE_DATA), then zeroes every field so a stale handle cannot
 * double-free or call through dangling method pointers.
 */
void gwrom_destroy( gwrom_t* gwrom )
{
/* calls the destroy method */
gwrom->destroy( gwrom );
/* free data if we own the buffer */
if ( gwrom->flags & GWROM_FREE_DATA )
{
gwrom_free( gwrom->data );
}
/* zeroes gwrom data.
   bug fix: iterate and flags were previously left set, so a reused handle
   could report GWROM_FREE_DATA for a buffer it no longer owns. */
gwrom->data = NULL;
gwrom->size = 0;
gwrom->flags = 0;
gwrom->user_data = NULL;
gwrom->destroy = NULL;
gwrom->find = NULL;
gwrom->iterate = NULL;
}
/* Maps a GWROM_* status code to a static, human-readable message. */
const char* gwrom_error_message( int error )
{
  switch ( error )
  {
    case GWROM_OK:              return "Ok";
    case GWROM_INVALID_ROM:     return "Invalid ROM (corrupted file?)";
    case GWROM_NO_MEMORY:       return "Out of memory";
    case GWROM_ENTRY_NOT_FOUND: return "Entry not found";
    default:                    return "Unknown error";
  }
}
|
// Tailwind CSS configuration (JIT mode) for a Laravel + Jetstream project.
const defaultTheme = require('tailwindcss/defaultTheme');

const config = {
  mode: 'jit',
  // Template sources scanned for used class names.
  purge: [
    './vendor/laravel/framework/src/Illuminate/Pagination/resources/views/*.blade.php',
    './vendor/laravel/jetstream/**/*.blade.php',
    './storage/framework/views/*.php',
    './resources/views/**/*.blade.php',
  ],
  theme: {
    extend: {
      // Nunito first, falling back to the stock sans-serif stack.
      fontFamily: {
        sans: ['Nunito', ...defaultTheme.fontFamily.sans],
      },
    },
  },
  plugins: [require('@tailwindcss/forms'), require('@tailwindcss/typography')],
  // Disable the built-in `container` core plugin.
  corePlugins: {
    container: false,
  },
};

module.exports = config;
|
/**
 * Knex migration: create the `templates` table.
 * All columns are nullable; `increments()` adds the auto-increment `id` PK.
 */
exports.up = (knex) =>
  knex.schema.createTable('templates', (table) => {
    table.increments();
    table.string('title').nullable();
    table.string('description').nullable();
    table.string('before_instructions').nullable();
    table.text('instructions').nullable();
    table.string('after_instructions').nullable();
    table.string('tags').nullable();
    table.integer('created_by').nullable();
  });

/** Rollback: drop the `templates` table. */
exports.down = (knex) => knex.schema.dropTable('templates');
|
import numpy as np
import torch
from sklearn.metrics import auc
import torch.nn.functional as F
from explainable_ai_image_measures.irof import IrofDataset
from explainable_ai_image_measures.pixel_relevancy import PixelRelevancyDataset
class Measures:
    """Attribution-quality measures (IAUC, DAUC, IROF) for image models.

    Perturbed versions of an input image (pixels or segments added/removed
    in attribution order) are sent through ``self.model`` batch-wise, and
    the per-step class probabilities are reduced to an area-under-curve
    score via :meth:`_calc_single_score`.
    """

    def __init__(self,
                 model,
                 batch_size=64,
                 irof_segments=40,
                 irof_sigma=5,
                 pixel_package_size=1,
                 normalize=True,
                 clip01=False,
                 baseline_color=None):
        """
        Parametrize the future measurements
        model: PyTorch model
        batch_size: During each iteration batch_size number of images will be sent through the network simultaneously
        irof_segments: Maximum number of slic segments, that we want to use for measuring. Only relevant if you compute
                       IROF later
        irof_sigma: Parameter used in the slic algorithm
        pixel_package_size: E.g. for imagenet you may have 224*224=50,176 pixels. Therefore sending 50,176 pixels
                            through the network may lead to too much overhead. Instead you can also remove / add blocks
                            of pixels to speed up the computation. Only relevant for IAUC, DAUC
        normalize: With activated normalization the new probabilities are divided by the probabiilties of the old image.
                   This allows the comparison of attributions independent of how sure the network is for the original
                   image. Activating normalization is highly encouraged if comparing attributions across several
                   images.
        clip01: Clips the computed probabilities between [0, 1]. This is only relevant for normalize=True.
                In some cases the probabilities after e.g. removing parts of the original image may be higher than
                before. E.g. for IROF this could theoretically lead to negative scores. If you want to prohibit this,
                activate clip01. Note that the clipping clips each individual score. Indirectly you also ensure that
                the final score is within [0,1]
        baseline_color: For IROF and DAUC we iteratively remove parts of the image and replace it by the baseline
                        color as specified here. For IAUC we start with an image consisting only of the baseline_color.
                        By default the mean color is used.
        """
        self.model = model
        self.batch_size = batch_size
        self.irof_segments = irof_segments
        self.irof_sigma = irof_sigma
        self.pixel_package_size = pixel_package_size
        self.normalize = normalize
        self.clip01 = clip01
        self.baseline_color = baseline_color

    def _calc_probs(self, image_batch, label):
        # Probability of `label` for each image in the batch (softmax over
        # the class dimension of the model output).
        probs = F.softmax(self.model(image_batch), dim=1)
        return probs[:, label]

    def _calc_single_score(self, scoring_dataset, label):
        """Run all perturbed batches through the model and reduce to one score.

        The last probability in the concatenated sequence is used as the
        normalization reference (per the ``normalize`` description in
        ``__init__``, presumably the unperturbed image -- confirm against
        the dataset implementations).

        Returns (score, probs): the AUC over perturbation steps divided by
        the number of steps, and the per-step probability tensor.
        """
        probs = []
        with torch.no_grad():
            for j, img_batch in enumerate(scoring_dataset):
                probs += [self._calc_probs(img_batch, label)]
        probs = torch.cat(probs).flatten()
        if self.normalize:
            # Divide every step by the reference probability (last entry).
            probs = probs[:-1] / probs[-1]
        else:
            probs = probs[:-1]
        if self.clip01:
            # Clip each individual score into [0, 1] (see __init__).
            probs = torch.clamp(probs, 0, 1)
        probs = scoring_dataset.postprocess_scores(probs)
        # Area under the probability curve, normalized by the step count.
        x = np.arange(0, len(probs))
        y = probs.detach().cpu().numpy()
        score = auc(x, y) / len(probs)
        return score, probs.detach()

    def _assert_check(self, image, attribution):
        # Sanity checks: image is (channels, width, height), attribution
        # matches its spatial size, and any baseline color is a 1-D
        # per-channel vector.
        assert(len(image.shape) == 3)
        assert(image.shape[1:] == attribution.shape)
        if self.baseline_color is not None:
            assert(len(self.baseline_color.shape) == 1)
            assert(len(self.baseline_color) == image.shape[0])

    def compute_IAUC(self, image, attribution, label):
        """
        Computes IAUC for a single image and attribution
        image: Torch.FloatTensor(color_channel, width, height)
        attribution: Torch.FloatTensor(width, height)
        label: Label of the attribution
        """
        self._assert_check(image, attribution)
        with torch.no_grad():
            # Third positional argument True vs. False distinguishes IAUC
            # from DAUC below -- see PixelRelevancyDataset for semantics.
            dataset = PixelRelevancyDataset(
                image,
                attribution,
                True,
                self.batch_size,
                self.pixel_package_size,
                image.device,
                self.baseline_color
            )
        return self._calc_single_score(dataset, label)

    def compute_DAUC(self, image, attribution, label):
        """
        Computes DAUC for a single image and attribution
        image: Torch.FloatTensor(color_channel, width, height)
        attribution: Torch.FloatTensor(width, height)
        label: Label of the attribution
        """
        self._assert_check(image, attribution)
        with torch.no_grad():
            # False selects the DAUC direction (cf. compute_IAUC above).
            dataset = PixelRelevancyDataset(
                image,
                attribution,
                False,
                self.batch_size,
                self.pixel_package_size,
                image.device,
                self.baseline_color
            )
        return self._calc_single_score(dataset, label)

    def compute_IROF(self, image, attribution, label):
        """
        Computes IROF for a single image and attribution
        image: Torch.FloatTensor(color_channel, width, height)
        attribution: Torch.FloatTensor(width, height)
        label: Label of the attribution
        """
        self._assert_check(image, attribution)
        with torch.no_grad():
            dataset = IrofDataset(
                image,
                attribution,
                self.batch_size,
                self.irof_segments,
                self.irof_sigma,
                image.device,
                self.baseline_color
            )
        return self._calc_single_score(dataset, label)

    def compute_batch(self, images, attributions, labels, IROF=True, IAUC=True, DAUC=True):
        """
        Computes the batch for many images and allows multiple attributions per image.
        image: Torch.FloatTensor(nr_images, color_channel, width, height)
        attribution: (nr_images, nr_attributions_per_image, width, height)
        labels: Tuple / Array / Tensor of Int
        IROF: Defines, whether IROF is computed
        IAUC: Defines, whether IAUC is computed
        DAUC: Defines, whether DAUC is computed
        """
        assert(len(images) == len(attributions))
        assert(len(images) == len(labels))
        # Select which measures to compute this call.
        functions = dict()
        if IROF:
            functions["IROF"] = self.compute_IROF
        if IAUC:
            functions["IAUC"] = self.compute_IAUC
        if DAUC:
            functions["DAUC"] = self.compute_DAUC
        if len(functions) == 0:
            return None
        result = dict()
        for method in functions:
            # scores has shape (nr_images, nr_attributions_per_image).
            scores = torch.zeros(attributions.shape[0:2])
            probs = []
            for img_id in range(len(images)):
                probs.append([])
                for attr_id in range(len(attributions[img_id])):
                    score, prob = functions[method](
                        images[img_id],
                        attributions[img_id, attr_id],
                        labels[img_id]
                    )
                    scores[img_id, attr_id] = score
                    probs[-1].append(prob)
                # Stack the per-attribution probability curves per image.
                probs[-1] = torch.stack(probs[-1])
            result[method] = (scores, probs)
        return result
|
// Copyright 2020 Maxime ROUFFET. All Rights Reserved.
#pragma once
#include <SPlanner/AI/Task/SP_TaskImpl.h>
#include "SP_ChainTask.generated.h"
/**
 * Chain of Tasks.
 * Wraps a list of sub-tasks (Tasks) behind the single-task interface.
 * Per-execution progress is tracked in USP_ChainTaskInfos (Index /
 * bSubHasStarted), which suggests the sub-tasks run one after another --
 * confirm against the .cpp implementation.
 */
UCLASS(BlueprintType, Blueprintable, ClassGroup = "SPlanner|Task")
class SPLANNER_API USP_ChainTask : public USP_TaskImpl
{
	GENERATED_BODY()

protected:
	/** The handled tasks. */
	UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "SPlanner|Task|Chain")
	TArray<USP_TaskImpl*> Tasks;

	/** Whether the chain is available for the given planner. */
	bool IsAvailable(const USP_PlannerComponent* Planner) const override;

	/** Hook called after execution, with the success/failure result. */
	void PostExecution(const USP_PlannerComponent* Planner, bool bExecutionSuccess) override;

	/** The pre-condition of the chain (ie: chain of pre-condition / post-condition through Impls). */
	bool PreCondition_Implementation(const USP_PlanGenInfos* Infos) const override;

	/** The post-condition of the action (ie: chain of all post-condition of Impls). */
	bool PostCondition_Implementation(USP_PlanGenInfos* Infos) const override;

	/** Undo the chain's post-condition on the plan-generation infos. */
	bool ResetPostCondition_Implementation(USP_PlanGenInfos* Infos) const override;

	/** Task lifecycle overrides: begin / tick / end. */
	bool Begin_Internal_Implementation(USP_AIPlannerComponent* Planner, USP_TaskInfos* TaskInfos) override;
	ESP_PlanExecutionState Tick_Internal_Implementation(float DeltaSeconds, USP_AIPlannerComponent* Planner, USP_TaskInfos* TaskInfos) override;
	bool End_Internal_Implementation(USP_AIPlannerComponent* Planner, USP_TaskInfos* TaskInfos) override;

public:
	USP_ChainTask(const FObjectInitializer& ObjectInitializer);

	/** Forwards a planner notify event to the task. */
	void OnNotify(USP_AIPlannerComponent* Planner, ESP_AIPlannerNotify Notify, USP_TaskInfos* TaskInfos) override;
};
/** Task info implementation for USP_ChainTask. */
UCLASS(BlueprintType, ClassGroup = "SPlanner|Task")
class USP_ChainTaskInfos : public USP_TaskInfos
{
	GENERATED_BODY()

	// Only accessible by USP_ChainTask.
	friend USP_ChainTask;

	// Index of the sub-task currently being executed (presumably into
	// USP_ChainTask::Tasks -- confirm in the .cpp).
	int Index = 0;

	// NOTE(review): looks like this flags whether the current sub-task has
	// already been started -- confirm against USP_ChainTask's tick logic.
	bool bSubHasStarted = false;

public:
	/**
	 * Current executed task infos.
	 * Must be UPROPERTY() to avoid garbage collection.
	 */
	UPROPERTY(BlueprintReadOnly, Category = "SPlanner|Task|Chain")
	USP_TaskInfos* TaskInfos = nullptr;
};
|
#ifndef XMCOMP_NETWORK_H
#define XMCOMP_NETWORK_H
#include "common.h"
/* A network connection handle: the OS socket plus a liveness flag. */
typedef struct {
	int socket;      /* underlying socket descriptor */
	BOOL connected;  /* nonzero while the connection is considered live */
} Socket;

/* Connect to the given host/port; presumably returns TRUE on success --
   confirm against the implementation. */
BOOL net_connect(Socket *, char *, int);
/* Send bytes from the buffer; returns a count/status (see implementation). */
int net_send(Socket *, char *, int);
/* Receive bytes into the buffer; returns a count/status (see implementation). */
int net_recv(Socket *, char *, int);
/* Tear down the connection. */
void net_disconnect(Socket *);
/* Stream setup/teardown over the socket (from/to/password suggest an XMPP
   stream handshake -- confirm against the implementation). */
BOOL net_stream(Socket *sock, char *from, char *to, char *password);
BOOL net_unstream(Socket *);
#endif
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
from flexget.utils.log import log_once
try:
from flexget.plugins.api_rottentomatoes import lookup_movie, API_KEY
except ImportError:
raise plugin.DependencyError(issued_by='rottentomatoes_lookup', missing='api_rottentomatoes',
message='rottentomatoes_lookup requires the `api_rottentomatoes` plugin')
log = logging.getLogger('rottentomatoes_lookup')
def get_imdb_id(movie):
    """Return the 'tt'-prefixed IMDB id from the movie's alternate ids, or None."""
    imdb_id = next((alt.id for alt in movie.alternate_ids if alt.name == 'imdb'), None)
    if imdb_id is not None:
        return 'tt' + imdb_id
def get_rt_url(movie):
    """Return the movie's Rotten Tomatoes page URL (the 'alternate' link), or None."""
    url = next((link.url for link in movie.links if link.name == 'alternate'), None)
    return url
class PluginRottenTomatoesLookup(object):
    """
    Retrieves Rotten Tomatoes information for entries.

    Example::

        rottentomatoes_lookup: yes
    """
    # Maps entry field name -> movie attribute name (str) or a callable
    # deriving the value from the movie object returned by lookup_movie.
    field_map = {
        'rt_name': 'title',
        'rt_id': 'id',
        'imdb_id': get_imdb_id,
        'rt_year': 'year',
        'rt_genres': lambda movie: [genre.name for genre in movie.genres],
        'rt_mpaa_rating': 'mpaa_rating',
        'rt_runtime': 'runtime',
        'rt_critics_consensus': 'critics_consensus',
        'rt_releases': lambda movie: dict((release.name, release.date) for
                                          release in movie.release_dates),
        'rt_critics_rating': 'critics_rating',
        'rt_critics_score': 'critics_score',
        'rt_audience_rating': 'audience_rating',
        'rt_audience_score': 'audience_score',
        'rt_average_score': lambda movie: (movie.critics_score + movie.audience_score) / 2,
        'rt_synopsis': 'synopsis',
        'rt_posters': lambda movie: dict((poster.name, poster.url) for poster in movie.posters),
        'rt_actors': lambda movie: [actor.name for actor in movie.cast],
        'rt_directors': lambda movie: [director.name for director in movie.directors],
        'rt_studio': 'studio',
        'rt_alternate_ids': lambda movie: dict((alt_id.name, alt_id.id)
                                               for alt_id in movie.alternate_ids),
        'rt_url': get_rt_url,
        # Generic fields filled by all movie lookup plugins:
        'movie_name': 'title',
        'movie_year': 'year'}

    # Config is either a boolean (enable) or a string custom API key.
    schema = {'oneOf': [
        {'type': 'boolean'},
        {'type': 'string', 'description': 'provide a custom api key'}
    ]}

    def lazy_loader(self, entry, field):
        """Does the lookup for this entry and populates the entry fields.

        :param entry: entry to perform lookup on
        :param field: the field to be populated (others may be populated as well)
        :returns: the field value
        """
        try:
            self.lookup(entry, key=self.key)
        except plugin.PluginError as e:
            log_once(e.value.capitalize(), logger=log)
            # Set all of our fields to None if the lookup failed
            entry.unregister_lazy_fields(self.field_map, self.lazy_loader)
        return entry[field]

    def lookup(self, entry, search_allowed=True, key=None):
        """
        Perform Rotten Tomatoes lookup for entry.

        :param entry: Entry instance
        :param search_allowed: Allow fallback to search
        :param key: optionally specify an API key to use
        :raises PluginError: Failure reason
        """
        if not key:
            # Fall back to the module-level default API key.
            key = API_KEY
        movie = lookup_movie(smart_match=entry['title'],
                             rottentomatoes_id=entry.get('rt_id', eval_lazy=False),
                             only_cached=(not search_allowed),
                             api_key=key
                             )
        log.debug(u'Got movie: %s' % movie)
        # Copy movie attributes onto the entry via field_map.
        entry.update_using_map(self.field_map, movie)

    def on_task_metainfo(self, task, config):
        # Metainfo phase hook: register lazy lookup fields on every entry
        # so the actual API call is deferred until a field is accessed.
        if not config:
            return
        if isinstance(config, basestring):  # NOTE: Python 2 (`basestring`)
            self.key = config.lower()
        else:
            self.key = None
        for entry in task.entries:
            entry.register_lazy_fields(self.field_map, self.lazy_loader)
@event('plugin.register')
def register_plugin():
    # Register the lookup plugin with FlexGet under the api_ver=2 interface.
    plugin.register(PluginRottenTomatoesLookup, 'rottentomatoes_lookup', api_ver=2)
|
(window["webpackJsonp"]=window["webpackJsonp"]||[]).push([["chunk-733c35b2"],{"294d":function(e,t,i){"use strict";var n=function(){var e=this,t=e.$createElement,i=e._self._c||t;return i("div",{staticStyle:{"line-height":"1.8"}},[1==e.qType||2==e.qType?i("div",{directives:[{name:"loading",rawName:"v-loading",value:e.qLoading,expression:"qLoading"}]},[i("div",{staticClass:"q-title",domProps:{innerHTML:e._s(e.question.title)}}),i("div",{staticClass:"q-content"},e._l(e.question.items,(function(t){return i("span",{key:t.id,staticClass:"q-item-contain"},[i("span",{staticClass:"q-item-prefix"},[e._v(e._s(t.prefix))]),i("span",{staticClass:"q-item-content",domProps:{innerHTML:e._s(t.content)}})])})),0)]):3==e.qType?i("div",{directives:[{name:"loading",rawName:"v-loading",value:e.qLoading,expression:"qLoading"}]},[i("div",{staticClass:"q-title",staticStyle:{display:"inline","margin-right":"10px"},domProps:{innerHTML:e._s(e.question.title)}}),i("span",[e._v("(")]),e._l(e.question.items,(function(t){return i("span",{key:t.id},[i("span",{staticClass:"q-item-content",domProps:{innerHTML:e._s(t.content)}})])})),i("span",[e._v(")")])],2):4==e.qType||5==e.qType?i("div",{directives:[{name:"loading",rawName:"v-loading",value:e.qLoading,expression:"qLoading"}]},[i("div",{staticClass:"q-title",domProps:{innerHTML:e._s(e.question.title)}})]):i("div")])},r=[],o=(i("a9e3"),{name:"QuestionShow",props:{question:{type:Object,default:function(){return{}}},qLoading:{type:Boolean,default:!1},qType:{type:Number,default:0}},methods:{}}),s=o,a=i("2877"),l=Object(a["a"])(s,n,r,!1,null,null,null);t["a"]=l.exports},"2ac8":function(e,t,i){"use strict";var n=i("b775");t["a"]={pageList:function(e){return Object(n["a"])("/api/admin/question/page",e)},edit:function(e){return Object(n["a"])("/api/admin/question/edit",e)},select:function(e){return Object(n["a"])("/api/admin/question/select/"+e)},deleteQuestion:function(e){return Object(n["a"])("/api/admin/question/delete/"+e)}}},"2fca":function(e,t,i){"use 
strict";i.r(t);var n=function(){var e=this,t=e.$createElement,i=e._self._c||t;return i("div",{staticClass:"app-container"},[i("el-form",{directives:[{name:"loading",rawName:"v-loading",value:e.formLoading,expression:"formLoading"}],ref:"form",attrs:{model:e.form,"label-width":"100px",rules:e.rules}},[i("el-form-item",{attrs:{label:"年级:",prop:"gradeLevel",required:""}},[i("el-select",{attrs:{placeholder:"年级"},on:{change:e.levelChange},model:{value:e.form.gradeLevel,callback:function(t){e.$set(e.form,"gradeLevel",t)},expression:"form.gradeLevel"}},e._l(e.levelEnum,(function(e){return i("el-option",{key:e.key,attrs:{value:e.key,label:e.value}})})),1)],1),i("el-form-item",{attrs:{label:"学科:",prop:"subjectId",required:""}},[i("el-select",{attrs:{placeholder:"学科"},model:{value:e.form.subjectId,callback:function(t){e.$set(e.form,"subjectId",t)},expression:"form.subjectId"}},e._l(e.subjectFilter,(function(e){return i("el-option",{key:e.id,attrs:{value:e.id,label:e.name+" ( "+e.levelName+" )"}})})),1)],1),i("el-form-item",{attrs:{label:"题干:",prop:"title",required:""}},[i("el-input",{on:{focus:function(t){return e.inputClick(e.form,"title")}},model:{value:e.form.title,callback:function(t){e.$set(e.form,"title",t)},expression:"form.title"}})],1),i("el-form-item",{attrs:{label:"答案:",prop:"correct",required:""}},[i("el-input",{on:{focus:function(t){return e.inputClick(e.form,"correct")}},model:{value:e.form.correct,callback:function(t){e.$set(e.form,"correct",t)},expression:"form.correct"}})],1),i("el-form-item",{attrs:{label:"解析:",prop:"analyze",required:""}},[i("el-input",{on:{focus:function(t){return 
e.inputClick(e.form,"analyze")}},model:{value:e.form.analyze,callback:function(t){e.$set(e.form,"analyze",t)},expression:"form.analyze"}})],1),i("el-form-item",{attrs:{label:"分数:",prop:"score",required:""}},[i("el-input-number",{attrs:{precision:1,step:1,max:100},model:{value:e.form.score,callback:function(t){e.$set(e.form,"score",t)},expression:"form.score"}})],1),i("el-form-item",{attrs:{label:"难度:",required:""}},[i("el-rate",{staticClass:"question-item-rate",model:{value:e.form.difficult,callback:function(t){e.$set(e.form,"difficult",t)},expression:"form.difficult"}})],1),i("el-form-item",[i("el-button",{attrs:{type:"primary"},on:{click:e.submitForm}},[e._v("提交")]),i("el-button",{on:{click:e.resetForm}},[e._v("重置")]),i("el-button",{attrs:{type:"success"},on:{click:e.showQuestion}},[e._v("预览")])],1)],1),i("el-dialog",{staticStyle:{width:"100%",height:"100%"},attrs:{visible:e.richEditor.dialogVisible,"append-to-body":"","close-on-click-modal":!1,"show-close":!1,center:""},on:{"update:visible":function(t){return e.$set(e.richEditor,"dialogVisible",t)}}},[i("Ueditor",{on:{ready:e.editorReady}}),i("span",{staticClass:"dialog-footer",attrs:{slot:"footer"},slot:"footer"},[i("el-button",{attrs:{type:"primary"},on:{click:e.editorConfirm}},[e._v("确 定")]),i("el-button",{on:{click:function(t){e.richEditor.dialogVisible=!1}}},[e._v("取 消")])],1)],1),i("el-dialog",{staticStyle:{width:"100%",height:"100%"},attrs:{visible:e.questionShow.dialog},on:{"update:visible":function(t){return 
e.$set(e.questionShow,"dialog",t)}}},[i("QuestionShow",{attrs:{qType:e.questionShow.qType,question:e.questionShow.question,qLoading:e.questionShow.loading}})],1)],1)},r=[],o=i("5530"),s=(i("4de4"),i("294d")),a=i("63f4"),l=i("2f62"),c=i("2ac8"),u={components:{Ueditor:a["a"],QuestionShow:s["a"]},data:function(){return{form:{id:null,questionType:5,gradeLevel:null,subjectId:null,title:"",items:[],analyze:"",correct:"",score:"",difficult:0},subjectFilter:null,formLoading:!1,rules:{gradeLevel:[{required:!0,message:"请选择年级",trigger:"change"}],subjectId:[{required:!0,message:"请选择学科",trigger:"change"}],title:[{required:!0,message:"请输入题干",trigger:"blur"}],correct:[{required:!0,message:"请输入答案",trigger:"blur"}],analyze:[{required:!0,message:"请输入解析",trigger:"blur"}],score:[{required:!0,message:"请输入分数",trigger:"blur"}]},richEditor:{dialogVisible:!1,object:null,parameterName:"",instance:null},questionShow:{qType:0,dialog:!1,question:null,loading:!1}}},created:function(){var e=this.$route.query.id,t=this;this.initSubject((function(){t.subjectFilter=t.subjects})),e&&0!==parseInt(e)&&(t.formLoading=!0,c["a"].select(e).then((function(e){t.form=e.response,t.formLoading=!1})))},methods:Object(o["a"])(Object(o["a"])({editorReady:function(e){this.richEditor.instance=e;var t=this.richEditor.object[this.richEditor.parameterName];this.richEditor.instance.setContent(t),this.richEditor.instance.focus(!0)},inputClick:function(e,t){this.richEditor.object=e,this.richEditor.parameterName=t,this.richEditor.dialogVisible=!0},editorConfirm:function(){var e=this.richEditor.instance.getContent();this.richEditor.object[this.richEditor.parameterName]=e,this.richEditor.dialogVisible=!1},submitForm:function(){var 
e=this,t=this;this.$refs.form.validate((function(i){if(!i)return!1;e.formLoading=!0,c["a"].edit(e.form).then((function(i){1===i.code?(t.$message.success(i.message),t.delCurrentView(t).then((function(){t.$router.push("/exam/question/list")}))):(t.$message.error(i.message),e.formLoading=!1)})).catch((function(t){e.formLoading=!1}))}))},resetForm:function(){var e=this.form.id;this.$refs["form"].resetFields(),this.form={id:null,questionType:5,gradeLevel:null,subjectId:null,title:"",items:[],analyze:"",correct:"",score:"",difficult:0},this.form.id=e},levelChange:function(){var e=this;this.form.subjectId=null,this.subjectFilter=this.subjects.filter((function(t){return t.level===e.form.gradeLevel}))},showQuestion:function(){this.questionShow.dialog=!0,this.questionShow.qType=this.form.questionType,this.questionShow.question=this.form}},Object(l["b"])("exam",{initSubject:"initSubject"})),Object(l["b"])("tagsView",{delCurrentView:"delCurrentView"})),computed:Object(o["a"])(Object(o["a"])(Object(o["a"])({},Object(l["c"])("enumItem",["enumFormat"])),Object(l["e"])("enumItem",{questionTypeEnum:function(e){return e.exam.question.typeEnum},levelEnum:function(e){return e.user.levelEnum}})),Object(l["e"])("exam",{subjects:function(e){return e.subjects}}))},d=u,m=i("2877"),f=Object(m["a"])(d,n,r,!1,null,null,null);t["default"]=f.exports},"63f4":function(e,t,i){"use strict";var n=function(){var e=this,t=e.$createElement,i=e._self._c||t;return i("div",[i("script",{staticStyle:{height:"300px"},attrs:{id:e.randomId,type:"text/plain"}})])},r=[],o={name:"UE",props:{value:{default:function(){return""}}},data:function(){return{randomId:"editor_"+1e17*Math.random(),instance:null,ready:!1}},watch:{value:function(e,t){null!=e&&this.ready&&(this.instance=UE.getEditor(this.randomId),this.instance.setContent(e))}},mounted:function(){this.initEditor()},beforeDestroy:function(){null!==this.instance&&this.instance.destroy&&this.instance.destroy()},methods:{initEditor:function(){var 
e=this;this.$nextTick((function(){e.instance=UE.getEditor(e.randomId),e.instance.addListener("ready",(function(){e.ready=!0,e.$emit("ready",e.instance)}))}))},getUEContent:function(){return this.instance.getContent()},setText:function(e){this.instance=UE.getEditor(this.randomId),this.instance.setContent(e)}}},s=o,a=i("2877"),l=Object(a["a"])(s,n,r,!1,null,null,null);t["a"]=l.exports}}]);
|
""" Closuers
Free variables and closures
Remember: Functions defined inside another function can access the outer (nonLocal) variables
"""
def outer():
x = 'python'
/ this x refers to the one in outer's scope', this nonlocal variable x is called a free variable
def inner(): /
print("{0} rocks!".format(x)) when we consider inner, we are really looking at:
The function inner
the free variable x (with the current value python)
This is called a closure, # x thru the print statement
inner()
outer() # python rocks!
""" Returning the inner function
What happens if, instead of calling(running) inner from inside outer, we rune it?
def outer():
x = 'python'
def inner():
print("{0} rocks!".format(x))
"""
|
import random
from eth2spec.test.helpers.block import (
build_empty_block_for_next_slot,
)
from eth2spec.test.helpers.state import (
state_transition_and_sign_block,
transition_to,
)
from eth2spec.test.helpers.constants import (
MAINNET, MINIMAL,
)
from eth2spec.test.helpers.sync_committee import (
compute_aggregate_sync_committee_signature,
compute_committee_indices,
get_committee_indices,
run_sync_committee_processing,
run_successful_sync_committee_test,
)
from eth2spec.test.context import (
with_altair_and_later,
with_presets,
spec_state_test,
always_bls,
)
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_bad_domain(spec, state):
    # The full committee signs, but under the attester domain instead of the
    # sync-committee domain: processing must reject the block.
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)

    block = build_empty_block_for_next_slot(spec, state)
    signature = compute_aggregate_sync_committee_signature(
        spec,
        state,
        block.slot - 1,
        committee_indices,  # full committee signs
        domain_type=spec.DOMAIN_BEACON_ATTESTER,  # incorrect domain
    )
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[True] * len(committee_indices),
        sync_committee_signature=signature,
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_missing_participant(spec, state):
    # The aggregate is signed by the FULL committee, but the participation
    # bits claim one member did not take part -> verification must fail.
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    rng = random.Random(2020)
    excluded = rng.choice(committee_indices)

    block = build_empty_block_for_next_slot(spec, state)
    participation = [index != excluded for index in committee_indices]
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=participation,
        sync_committee_signature=compute_aggregate_sync_committee_signature(
            spec,
            state,
            block.slot - 1,
            committee_indices,  # full committee signs
        ),
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_no_participants(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    bit_count = len(block.body.sync_aggregate.sync_committee_bits)
    # An empty participation set is allowed, but it requires the special
    # infinity-point signature; an all-zero signature must be rejected.
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[False] * bit_count,
        sync_committee_signature=b'\x00' * 96,
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
# No-participants, with valid signature, is tested in test_sync_committee_rewards_empty_participants already.
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_infinite_signature_with_all_participants(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    bit_count = len(block.body.sync_aggregate.sync_committee_bits)
    # Full participation combined with the special-case no-participants
    # signature (point at infinity) must be rejected.
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[True] * bit_count,
        sync_committee_signature=spec.G2_POINT_AT_INFINITY,
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_infinite_signature_with_single_participant(spec, state):
    block = build_empty_block_for_next_slot(spec, state)
    # A single participant paired with the special-case no-participants
    # signature (point at infinity) must be rejected.
    bits = [False] * len(block.body.sync_aggregate.sync_committee_bits)
    bits[0] = True
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=bits,
        sync_committee_signature=spec.G2_POINT_AT_INFINITY,
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_extra_participant(spec, state):
    # The bits claim full participation, yet one member's signature is
    # missing from the aggregate -> verification must fail.
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    rng = random.Random(3030)
    missing_signer = rng.choice(committee_indices)

    block = build_empty_block_for_next_slot(spec, state)
    signers = [index for index in committee_indices if index != missing_signer]
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[True] * len(committee_indices),
        sync_committee_signature=compute_aggregate_sync_committee_signature(
            spec,
            state,
            block.slot - 1,
            signers,
        ),
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
@with_altair_and_later
@with_presets([MINIMAL], reason="to create nonduplicate committee")
@spec_state_test
def test_sync_committee_rewards_nonduplicate_committee(spec, state):
    committee_indices = get_committee_indices(spec, state, duplicates=False)
    committee_size = len(committee_indices)
    committee_bits = [True] * committee_size
    current_epoch = spec.get_current_epoch(state)
    active_validator_count = len(spec.get_active_validator_indices(state, current_epoch))

    # Preconditions: enough validators that no one serves twice on the committee.
    assert active_validator_count > spec.SYNC_COMMITTEE_SIZE
    assert committee_size == len(set(committee_indices))

    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
@with_altair_and_later
@with_presets([MAINNET], reason="to create duplicate committee")
@spec_state_test
def test_sync_committee_rewards_duplicate_committee_no_participation(spec, state):
    committee_indices = get_committee_indices(spec, state, duplicates=True)
    committee_size = len(committee_indices)
    # Nobody participates.
    committee_bits = [False] * committee_size
    current_epoch = spec.get_current_epoch(state)
    active_validator_count = len(spec.get_active_validator_indices(state, current_epoch))

    # Preconditions: fewer validators than seats, so some serve twice.
    assert active_validator_count < spec.SYNC_COMMITTEE_SIZE
    assert committee_size > len(set(committee_indices))

    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
@with_altair_and_later
@with_presets([MAINNET], reason="to create duplicate committee")
@spec_state_test
def test_sync_committee_rewards_duplicate_committee_half_participation(spec, state):
    committee_indices = get_committee_indices(spec, state, duplicates=True)
    committee_size = len(committee_indices)
    # First half participates, second half does not.
    half = committee_size // 2
    committee_bits = [True] * half + [False] * half
    assert len(committee_bits) == committee_size
    current_epoch = spec.get_current_epoch(state)
    active_validator_count = len(spec.get_active_validator_indices(state, current_epoch))

    # Preconditions: fewer validators than seats, so some serve twice.
    assert active_validator_count < spec.SYNC_COMMITTEE_SIZE
    assert committee_size > len(set(committee_indices))

    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
@with_altair_and_later
@with_presets([MAINNET], reason="to create duplicate committee")
@spec_state_test
def test_sync_committee_rewards_duplicate_committee_full_participation(spec, state):
    # Duplicate-bearing committee with every member participating.
    committee_indices = get_committee_indices(spec, state, duplicates=True)
    committee_bits = [True for _ in committee_indices]
    # Preconditions of this test case
    active_validator_count = len(spec.get_active_validator_indices(state, spec.get_current_epoch(state)))
    assert active_validator_count < spec.SYNC_COMMITTEE_SIZE
    assert len(committee_indices) > len(set(committee_indices))
    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
@with_altair_and_later
@spec_state_test
@always_bls
def test_sync_committee_rewards_not_full_participants(spec, state):
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    # Deterministic pseudo-random participation pattern (~50% of members);
    # fixed seed keeps the generated test vector reproducible.
    prng = random.Random(1010)
    committee_bits = [prng.choice([True, False]) for _ in range(len(committee_indices))]
    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
@with_altair_and_later
@spec_state_test
@always_bls
def test_sync_committee_rewards_empty_participants(spec, state):
    # No member of the committee participates at all.
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    committee_bits = [False] * len(committee_indices)
    yield from run_successful_sync_committee_test(spec, state, committee_indices, committee_bits)
@with_altair_and_later
@spec_state_test
@always_bls
def test_invalid_signature_past_block(spec, state):
    # An aggregate signed over a slot older than the previous one must be rejected.
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    for _ in range(2):
        # NOTE: need to transition twice to move beyond the degenerate case at genesis
        block = build_empty_block_for_next_slot(spec, state)
        # Valid sync committee signature here...
        block.body.sync_aggregate = spec.SyncAggregate(
            sync_committee_bits=[True] * len(committee_indices),
            sync_committee_signature=compute_aggregate_sync_committee_signature(
                spec,
                state,
                block.slot - 1,
                committee_indices,
            )
        )
        state_transition_and_sign_block(spec, state, block)
    invalid_block = build_empty_block_for_next_slot(spec, state)
    # Invalid signature from a slot other than the previous
    # (slot - 2 instead of slot - 1), so sync-committee processing must fail.
    invalid_block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[True] * len(committee_indices),
        sync_committee_signature=compute_aggregate_sync_committee_signature(
            spec,
            state,
            invalid_block.slot - 2,
            committee_indices,
        )
    )
    yield from run_sync_committee_processing(spec, state, invalid_block, expect_exception=True)
@with_altair_and_later
@with_presets([MINIMAL], reason="to produce different committee sets")
@spec_state_test
@always_bls
def test_invalid_signature_previous_committee(spec, state):
    # A signature produced by a stale (previous-period) committee must be rejected.
    # NOTE: the `state` provided is at genesis and the process to select
    # sync committees currently returns the same committee for the first and second
    # periods at genesis.
    # To get a distinct committee so we can generate an "old" signature, we need to advance
    # 2 EPOCHS_PER_SYNC_COMMITTEE_PERIOD periods.
    current_epoch = spec.get_current_epoch(state)
    old_sync_committee = state.next_sync_committee
    epoch_in_future_sync_commitee_period = current_epoch + 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
    slot_in_future_sync_committee_period = epoch_in_future_sync_commitee_period * spec.SLOTS_PER_EPOCH
    transition_to(spec, state, slot_in_future_sync_committee_period)
    # Use the previous sync committee to produce the signature.
    # Ensure that the pubkey sets are different.
    assert set(old_sync_committee.pubkeys) != set(state.current_sync_committee.pubkeys)
    committee_indices = compute_committee_indices(spec, state, old_sync_committee)
    block = build_empty_block_for_next_slot(spec, state)
    # Sign with the stale committee: processing should raise.
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[True] * len(committee_indices),
        sync_committee_signature=compute_aggregate_sync_committee_signature(
            spec,
            state,
            block.slot - 1,
            committee_indices,
        )
    )
    yield from run_sync_committee_processing(spec, state, block, expect_exception=True)
@with_altair_and_later
@spec_state_test
@always_bls
@with_presets([MINIMAL], reason="too slow")
def test_valid_signature_future_committee(spec, state):
    # After advancing two sync-committee periods, the then-current committee's
    # signature must be accepted (positive control for the previous test).
    # NOTE: the `state` provided is at genesis and the process to select
    # sync committees currently returns the same committee for the first and second
    # periods at genesis.
    # To get a distinct committee so we can generate an "old" signature, we need to advance
    # 2 EPOCHS_PER_SYNC_COMMITTEE_PERIOD periods.
    current_epoch = spec.get_current_epoch(state)
    old_current_sync_committee = state.current_sync_committee
    old_next_sync_committee = state.next_sync_committee
    epoch_in_future_sync_committee_period = current_epoch + 2 * spec.EPOCHS_PER_SYNC_COMMITTEE_PERIOD
    slot_in_future_sync_committee_period = epoch_in_future_sync_committee_period * spec.SLOTS_PER_EPOCH
    transition_to(spec, state, slot_in_future_sync_committee_period)
    sync_committee = state.current_sync_committee
    next_sync_committee = state.next_sync_committee
    # Sanity: all three committees involved are distinct objects.
    assert next_sync_committee != sync_committee
    assert sync_committee != old_current_sync_committee
    assert sync_committee != old_next_sync_committee
    committee_indices = compute_committee_indices(spec, state, sync_committee)
    block = build_empty_block_for_next_slot(spec, state)
    block.body.sync_aggregate = spec.SyncAggregate(
        sync_committee_bits=[True] * len(committee_indices),
        sync_committee_signature=compute_aggregate_sync_committee_signature(
            spec,
            state,
            block.slot - 1,
            committee_indices,
        )
    )
    yield from run_sync_committee_processing(spec, state, block)
@with_altair_and_later
@spec_state_test
@always_bls
@with_presets([MINIMAL], reason="prefer short search to find matching proposer")
def test_proposer_in_committee_without_participation(spec, state):
    # Walk slots until the block proposer is also a sync-committee member, then
    # exercise processing with that proposer *not* participating in the aggregate.
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    # NOTE: seem to reliably be getting a matching proposer in the first epoch w/ ``MINIMAL`` preset.
    for _ in range(spec.SLOTS_PER_EPOCH):
        block = build_empty_block_for_next_slot(spec, state)
        proposer_index = block.proposer_index
        proposer_pubkey = state.validators[proposer_index].pubkey
        proposer_is_in_sync_committee = proposer_pubkey in state.current_sync_committee.pubkeys
        if proposer_is_in_sync_committee:
            # Drop the proposer from both the bitfield and the signing set.
            participation = [index != proposer_index for index in committee_indices]
            participants = [index for index in committee_indices if index != proposer_index]
        else:
            participation = [True for _ in committee_indices]
            participants = committee_indices
        # Valid sync committee signature here...
        block.body.sync_aggregate = spec.SyncAggregate(
            sync_committee_bits=participation,
            sync_committee_signature=compute_aggregate_sync_committee_signature(
                spec,
                state,
                block.slot - 1,
                participants,
            )
        )
        if proposer_is_in_sync_committee:
            assert state.validators[block.proposer_index].pubkey in state.current_sync_committee.pubkeys
            yield from run_sync_committee_processing(spec, state, block)
            break
        else:
            # Not the case we want yet: apply the block and keep searching.
            state_transition_and_sign_block(spec, state, block)
    else:
        raise AssertionError("failed to find a proposer in the sync committee set; check test setup")
@with_altair_and_later
@spec_state_test
@always_bls
@with_presets([MINIMAL], reason="prefer short search to find matching proposer")
def test_proposer_in_committee_with_participation(spec, state):
    # Walk slots until the block proposer is also a sync-committee member, then
    # exercise processing with full participation (proposer included).
    committee_indices = compute_committee_indices(spec, state, state.current_sync_committee)
    participation = [True for _ in committee_indices]
    # NOTE: seem to reliably be getting a matching proposer in the first epoch w/ ``MINIMAL`` preset.
    for _ in range(spec.SLOTS_PER_EPOCH):
        block = build_empty_block_for_next_slot(spec, state)
        proposer_index = block.proposer_index
        proposer_pubkey = state.validators[proposer_index].pubkey
        proposer_is_in_sync_committee = proposer_pubkey in state.current_sync_committee.pubkeys
        # Valid sync committee signature here...
        block.body.sync_aggregate = spec.SyncAggregate(
            sync_committee_bits=participation,
            sync_committee_signature=compute_aggregate_sync_committee_signature(
                spec,
                state,
                block.slot - 1,
                committee_indices,
            )
        )
        if proposer_is_in_sync_committee:
            assert state.validators[block.proposer_index].pubkey in state.current_sync_committee.pubkeys
            yield from run_sync_committee_processing(spec, state, block)
            return
        else:
            # Not the case we want yet: apply the block and keep searching.
            state_transition_and_sign_block(spec, state, block)
    raise AssertionError("failed to find a proposer in the sync committee set; check test setup")
|
// Auto-generated timezone definition for the Jymfony DateTime component.
// Hydrates one RuleSet (o[0]) plus 14 DST Rule instances (o[1]..o[14]) and a
// list of fixed-offset transition spans. Data mirrors the IANA tz database
// (offsets LMT/MST/CST/PST suggest a Mexican zone — NOTE(review): confirm
// which zone this file represents from its filename). Do not edit by hand.
export default (() => {
    let o;
    return Jymfony.Component.VarExporter.Internal.Hydrator.hydrate(
        // Instances: o[0] = RuleSet, o[1]..o[14] = Rule (created without ctor).
        o = [
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.RuleSet')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
            (new ReflectionClass('Jymfony.Component.DateTime.Internal.Rule')).newInstanceWithoutConstructor(),
        ],
        null,
        // Private-property values to inject, keyed by class then property then
        // instance index within `o`.
        {
            'Jymfony.Component.DateTime.Internal.RuleSet': {
                ['_name']: {
                    ['0']: undefined,
                },
                ['_rules']: {
                    ['0']: [
                        o[1],
                        o[2],
                        o[3],
                        o[4],
                        o[5],
                        o[6],
                        o[7],
                        o[8],
                        o[9],
                        o[10],
                        o[11],
                        o[12],
                        o[13],
                        o[14],
                    ],
                },
                // Pre-computed per-year rule lookup cache.
                ['_cache']: {
                    ['0']: {
                        ['1997']: [
                            o[8],
                            o[9],
                            o[10],
                        ],
                        ['1998']: [
                            o[8],
                            o[9],
                            o[10],
                        ],
                        ['1999']: [
                            o[8],
                            o[9],
                            o[10],
                        ],
                        ['2000']: [
                            o[8],
                            o[9],
                            o[10],
                        ],
                        ['2001']: [
                            o[10],
                            o[11],
                            o[12],
                        ],
                        ['2009']: [
                            o[12],
                            o[13],
                            o[14],
                        ],
                        ['2010']: [
                            o[12],
                            o[13],
                            o[14],
                        ],
                        ['2014']: [
                            o[12],
                            o[13],
                            o[14],
                        ],
                        ['2015']: [
                            o[12],
                            o[13],
                            o[14],
                        ],
                    },
                },
            },
            'Jymfony.Component.DateTime.Internal.Rule': {
                ['_fromYear']: {
                    ['1']: 1939,
                    ['2']: 1939,
                    ['3']: 1940,
                    ['4']: 1941,
                    ['5']: 1943,
                    ['6']: 1944,
                    ['7']: 1950,
                    ['8']: 1950,
                    ['9']: 1996,
                    ['10']: 1996,
                    ['11']: 2001,
                    ['12']: 2001,
                    ['13']: 2002,
                    ['14']: 2002,
                },
                ['_toYear']: {
                    ['1']: 1939,
                    ['2']: 1939,
                    ['3']: 1940,
                    ['4']: 1941,
                    ['5']: 1943,
                    ['6']: 1944,
                    ['7']: 1950,
                    ['8']: 1950,
                    ['9']: 2000,
                    ['10']: 2000,
                    ['11']: 2001,
                    ['12']: 2001,
                    ['13']: Infinity,
                    ['14']: Infinity,
                },
                ['_inMonth']: {
                    ['1']: 2,
                    ['2']: 6,
                    ['3']: 12,
                    ['4']: 4,
                    ['5']: 12,
                    ['6']: 5,
                    ['7']: 2,
                    ['8']: 7,
                    ['9']: 4,
                    ['10']: 10,
                    ['11']: 5,
                    ['12']: 9,
                    ['13']: 4,
                    ['14']: 10,
                },
                ['_on']: {
                    ['1']: '5',
                    ['2']: '25',
                    ['3']: '9',
                    ['4']: '1',
                    ['5']: '16',
                    ['6']: '1',
                    ['7']: '12',
                    ['8']: '30',
                    ['9']: '1 %s this sun',
                    ['10']: 'last sun %s',
                    ['11']: '1 %s this sun',
                    ['12']: 'last sun %s',
                    ['13']: '1 %s this sun',
                    ['14']: 'last sun %s',
                },
                ['_at']: {
                    ['1']: '0:00',
                    ['2']: '0:00',
                    ['3']: '0:00',
                    ['4']: '0:00',
                    ['5']: '0:00',
                    ['6']: '0:00',
                    ['7']: '0:00',
                    ['8']: '0:00',
                    ['9']: '2:00',
                    ['10']: '2:00',
                    ['11']: '2:00',
                    ['12']: '2:00',
                    ['13']: '2:00',
                    ['14']: '2:00',
                },
                // DST saving in seconds (3600 = spring-forward, 0 = standard time).
                ['_save']: {
                    ['1']: 3600,
                    ['2']: 0,
                    ['3']: 3600,
                    ['4']: 0,
                    ['5']: 3600,
                    ['6']: 0,
                    ['7']: 3600,
                    ['8']: 0,
                    ['9']: 3600,
                    ['10']: 0,
                    ['11']: 3600,
                    ['12']: 0,
                    ['13']: 3600,
                    ['14']: 0,
                },
                ['_letters']: {
                    ['1']: 'D',
                    ['2']: 'S',
                    ['3']: 'D',
                    ['4']: 'S',
                    ['5']: 'W',
                    ['6']: 'S',
                    ['7']: 'D',
                    ['8']: 'S',
                    ['9']: 'D',
                    ['10']: 'S',
                    ['11']: 'D',
                    ['12']: 'S',
                    ['13']: 'D',
                    ['14']: 'S',
                },
                // Pre-computed transition instants per rule, per year.
                ['_cache']: {
                    ['1']: {},
                    ['2']: {},
                    ['3']: {},
                    ['4']: {},
                    ['5']: {},
                    ['6']: {},
                    ['7']: {},
                    ['8']: {
                        ['1950']: [
                            '001950-07-30T00:00:00',
                            '001950-07-30T00:00:00',
                        ],
                    },
                    ['9']: {
                        ['1997']: [
                            '001997-04-06T02:00:00',
                            '001997-04-06T03:00:00',
                        ],
                        ['1998']: [
                            '001998-04-05T02:00:00',
                            '001998-04-05T03:00:00',
                        ],
                        ['1999']: [
                            '001999-04-04T02:00:00',
                            '001999-04-04T03:00:00',
                        ],
                        ['2000']: [
                            '002000-04-02T02:00:00',
                            '002000-04-02T03:00:00',
                        ],
                    },
                    ['10']: {
                        ['1997']: [
                            '001997-10-26T02:00:00',
                            '001997-10-26T02:00:00',
                        ],
                        ['1998']: [
                            '001998-10-25T02:00:00',
                            '001998-10-25T02:00:00',
                        ],
                        ['1999']: [
                            '001999-10-31T02:00:00',
                            '001999-10-31T02:00:00',
                        ],
                        ['2000']: [
                            '002000-10-29T02:00:00',
                            '002000-10-29T02:00:00',
                        ],
                    },
                    ['11']: {
                        ['2001']: [
                            '002001-05-06T02:00:00',
                            '002001-05-06T03:00:00',
                        ],
                    },
                    ['12']: {
                        ['2001']: [
                            '002001-09-30T02:00:00',
                            '002001-09-30T02:00:00',
                        ],
                    },
                    ['13']: {
                        ['2009']: [
                            '002009-04-05T02:00:00',
                            '002009-04-05T03:00:00',
                        ],
                        ['2010']: [
                            '002010-04-04T02:00:00',
                            '002010-04-04T03:00:00',
                        ],
                        ['2014']: [
                            '002014-04-06T02:00:00',
                            '002014-04-06T03:00:00',
                        ],
                        ['2015']: [
                            '002015-04-05T02:00:00',
                            '002015-04-05T03:00:00',
                        ],
                    },
                    ['14']: {
                        ['2009']: [
                            '002009-10-25T02:00:00',
                            '002009-10-25T02:00:00',
                        ],
                        ['2010']: [
                            '002010-10-31T02:00:00',
                            '002010-10-31T02:00:00',
                        ],
                        ['2014']: [
                            '002014-10-26T02:00:00',
                            '002014-10-26T02:00:00',
                        ],
                        ['2015']: [
                            '002015-10-25T02:00:00',
                            '002015-10-25T02:00:00',
                        ],
                    },
                },
            },
        },
        // Fixed-offset spans: offset in seconds, 'until' as a Unix timestamp,
        // with the final span delegating to the RuleSet (o[0]) for DST.
        [
            {
                ['offset']: -26632,
                ['dst']: false,
                ['abbrev']: 'LMT',
                ['until']: -1514739600,
                ['format']: 'LMT',
            },
            {
                ['offset']: -25200,
                ['dst']: false,
                ['abbrev']: 'MST',
                ['until']: -1343066400,
                ['format']: 'MST',
            },
            {
                ['offset']: -21600,
                ['dst']: false,
                ['abbrev']: 'CST',
                ['until']: -1234807200,
                ['format']: 'CST',
            },
            {
                ['offset']: -25200,
                ['dst']: false,
                ['abbrev']: 'MST',
                ['until']: -1220292000,
                ['format']: 'MST',
            },
            {
                ['offset']: -21600,
                ['dst']: false,
                ['abbrev']: 'CST',
                ['until']: -1207159200,
                ['format']: 'CST',
            },
            {
                ['offset']: -25200,
                ['dst']: false,
                ['abbrev']: 'MST',
                ['until']: -1191344400,
                ['format']: 'MST',
            },
            {
                ['offset']: -21600,
                ['dst']: false,
                ['abbrev']: 'CST',
                ['until']: -873828000,
                ['format']: 'CST',
            },
            {
                ['offset']: -25200,
                ['dst']: false,
                ['abbrev']: 'MST',
                ['until']: -661539600,
                ['format']: 'MST',
            },
            {
                ['offset']: -28800,
                ['dst']: false,
                ['abbrev']: 'PST',
                ['until']: 28800,
                ['format']: 'PST',
            },
            {
                ['until']: 915174000,
                ['ruleSet']: o[0],
                ['offset']: -25200,
                ['abbrev']: 'M%sT',
            },
            {
                ['offset']: -25200,
                ['dst']: false,
                ['abbrev']: 'MST',
                ['until']: Infinity,
                ['format']: 'MST',
            },
        ],
        [
            0,
        ]
    );
})();
;
|
import React from 'react';
import styles from '../css/banner.module.css';
const Banner = ({ title, info, children }) => {
return (
<div className={styles.banner}>
<h1>{title}</h1>
<p>{info}</p>
{children}
</div>
);
};
export default Banner;
|
"""*******************************************************
A python implementation of catsHTM.m
******************************************************"""
#print __doc__
import math
import tqdm
import numpy as np
from . import celestial
import scipy.io as sio
from . import params
import os.path
import h5py
from . import class_HDF5
import time
import pdb
#import time
d=dict() #cache of index files already loaded by search_htm_ind, keyed '<VarName>_name'/'<VarName>_array'; avoids re-reading the same index file, which can be time-consuming e.g. in a loop
# define FileNotFoundError for Python 2.7 (where it does not exist; IOError is the closest built-in)
try:
    FileNotFoundError
except NameError:
    FileNotFoundError = IOError
__all__=['cone_search','search_htm_ind','search_sortedlat','sources_match','htm_search_cone','xmatch_2cats','load_trix_by_ind','simplify_list','load_colcell','mfind_bin','match_cats','simplify2','simplify3','Example_QueryAllFun','read_ztf_HDF_matched'] #redefinition of '*' for import *
def get_CatDir(CatName):
    """Translate a catalog base name (e.g. 'GAIADR2') into the sub-directory of
    the local HDF5 catalog tree where that catalog lives (e.g. 'GAIA/DR2').

    Raises ValueError when CatName is not a known catalog name. Catalogs whose
    directory is named exactly after the catalog are returned unchanged.
    """
    # Catalogs stored under a directory name that differs from their base name.
    renamed = {
        'TMASS': '2MASS',
        'TMASSxsc': '2MASSxsc',
        'DECaLS': 'DECaLS/DR5',
        'GAIADR1': 'GAIA/DR1',
        'GAIADR2': 'GAIA/DR2',
        # NOTE(review): 'GAIA/DRE3' looks like a possible typo for 'GAIA/EDR3'
        # — confirm against the on-disk data tree before changing.
        'GAIAEDR3': 'GAIA/DRE3',
        'GALEX': 'GALEX/DR6Plus7',
        'HSCv2': 'HST/HSCv2',
        'IPHAS': 'IPHAS/DR2',
        'NEDz': 'NED/20180502',
        'SDSSDR10': 'SDSS/DR10',
        'SDSSoffset': 'SDSS/DR14offset',
        'SpecSDSS': 'SpecSDSS/DR14',
        'SAGE': 'Spitzer/SAGE',
        'IRACgc': 'Spitzer/IRACgc',
        'UKIDSS': 'UKIDSS/DR10',
        'VISTAviking': 'VISTA/Viking/DR2',
        'VSTatlas': 'VST/ATLAS/DR3',
        'VSTkids': 'VST/KiDS/DR3',
        'ztfSrcLCDR1': './ZTF/SrcLCDR1',
        'ztfDR1var': './ZTF/ztfDR1var',
    }
    if CatName in renamed:
        return renamed[CatName]
    # Catalogs stored in a directory named exactly after the catalog.
    if CatName in ('AKARI', 'APASS', 'Cosmos', 'FIRST', 'NVSS', 'PS1', 'PTFpc', 'PGC',
                   'ROSATfsc', 'SkyMapper', 'UCAC4', 'WISE', 'XMM', 'NOAO'):
        return CatName
    raise ValueError('you need to specify a valid name for the catalog (see README file for list of available catalogs and names)')
def cone_search(CatName,RA,Dec,Radius,catalogs_dir='./data',RadiusUnits='arcsec',IndexFileTemplate=params.IndexFileTemplate,CatFileTemplate=params.CatFileTemplate
                ,htmTemplate=params.htmTemplate,NcatinFile=params.NcatinFile,IndexVarname=None,ColRa = 0,ColDec=1,OnlyCone=True,
                ColCelFile = params.ColCelFile,OutType= 'np_array',verbose=False):
    """Perform a cone search around (RA, Dec) on a local HDF5 catalog sorted into HTM trixels.
    Input  : - CatName: catalog base name (e.g., 'GAIADR1').
             - RA: J2000.0 R.A. of the cone center [radians].
             - Dec: J2000.0 Dec. of the cone center [radians].
             - Radius: search radius [arcsec].
             - Optional: catalogs_dir - root directory of the HDF5 catalogs. Default './data'.
                        RadiusUnits - radius units. Default is 'arcsec'. DO NOT CHANGE THIS DEFAULT
                        IndexFileTemplate - index file name template. Default is '%s_htm.hdf5'.
                        CatFileTemplate - catalog file name template. Default is '%s_htm_%06d.hdf5'.
                        htmTemplate - HTM dataset name template. Default is 'htm_%06d'.
                        NcatinFile - maximum number of datasets per file. Default is 100.
                        IndexVarname - dataset key of the index file. Default is None (derived from CatName).
                        ColRa, ColDec - 0-based column indices of RA/Dec in the catalog. Defaults 0 and 1.
                        OnlyCone - return only sources within the cone. Default is True.
                        ColCelFile - column-description .mat file template. Default is '%s_htmColCell.mat'.
                        OutType, verbose - output type tag and verbosity flag.
    Output : - cat_onlycone: numpy array, one row per source inside the cone.
             - ColCell: object array of column names.
             - ColUnits: object array of column units.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) Feb 2018"""
    if verbose==True:
        print('*************')
        print('Catalog: {0}; cone radius: {1} arcsec; cone center: (RA,DEC)=({2},{3})'.format(CatName,Radius,RA,Dec))
        print('*************')
    root_to_data=catalogs_dir+'/'
    CatDir=get_CatDir(CatName)
    Rad = 180. / math.pi
    #if RadiusUnits=='arcsec':
    Radius=Radius/(Rad*3600) #converts arcsec radius into radians radius (note: rebinds the parameter)
    ColCelFile=ColCelFile % CatName
    IndexFilename=IndexFileTemplate % CatName
    # Load the .mat file describing the catalog columns; its 'ColCell' array may
    # be stored row-wise or column-wise, so take the larger dimension as Ncol.
    if os.path.isfile(root_to_data+CatDir+'/'+ColCelFile)==True:
        test = sio.loadmat(root_to_data+CatDir+'/'+ColCelFile)
        if np.shape(test['ColCell'])[1]<np.shape(test['ColCell'])[0]:
            Ncol=np.shape(test['ColCell'])[0]
        else:
            Ncol=np.shape(test['ColCell'])[1]
    else:
        raise FileNotFoundError("you need to specify a valid path for the HDF5 catalogs location")
    ### computes the list of index of the trixels which intercept the cone
    ID=search_htm_ind(IndexFilename,RA,Dec,Radius,catalogs_dir,VarName=IndexVarname,CatDir=CatDir,verbose=verbose) #list of IDs of winners leaf
    ### computes the catalog with the sources located in those trixels
    ID_matlab=ID+1  # HTM indices are 1-based on the MATLAB side
    FileID=np.floor(ID_matlab/NcatinFile)*NcatinFile  # file number containing each trixel dataset
    Nid=len(ID_matlab) #number of leaves intercepting the circle
    if Nid==0:#if none of the catalog's trixels intercept the cone
        if verbose==True:
            print('INFO: the cone does not intercept the catalog')
        cat_onlycone=np.array([])
    else:
        # Stack the contents of every intercepted trixel, then keep only sources
        # whose angular distance to the center is below Radius.
        FileName_0 = CatFileTemplate % (CatName, FileID[0])
        DataName_0 = htmTemplate % ID_matlab[0]
        cat = class_HDF5.HDF5(root_to_data + CatDir + '/' + FileName_0).load(DataName_0, numpy_array=True).T
        for Iid in range(Nid)[1:]:
            FileName=CatFileTemplate % (CatName, FileID[Iid])
            DataName=htmTemplate % ID_matlab[Iid]
            cat=np.vstack((cat, class_HDF5.HDF5(root_to_data + CatDir + '/' + FileName).load(DataName, numpy_array=True).T))
        #if OnlyCone==True:
        D=celestial.sphere_distance_fast(RA,Dec,cat[:,ColRa],cat[:,ColDec])#[0]
        cat_onlycone=cat[D<Radius,:]
    ### build the column-name and column-unit arrays
    if cat_onlycone.ndim>1:
        ColCell=np.empty((np.shape(cat_onlycone)[1]),dtype=object)
        ColUnits=np.empty((np.shape(cat_onlycone)[1]),dtype=object)
    else:
        ColCell=np.empty((Ncol),dtype=object)
        ColUnits=np.empty((Ncol),dtype=object)
    # 'ColCell'/'ColUnits' orientation differs per catalog; handle both layouts.
    if np.shape(test['ColCell'])[1]>np.shape(test['ColCell'])[0]:
        for i,j in enumerate(test['ColCell'][0,:]):
            ColCell[i]=str(test['ColCell'][0,i][0])
        for i,j in enumerate(test['ColUnits'][0,:]):
            if len(test['ColUnits'][0,i])>0:
                ColUnits[i]=str(test['ColUnits'][0,i][0])
            else:
                ColUnits[i]=' '  # unit missing in the .mat file
    else: #rare cases: Cosmos and TMASSxsc
        for i,j in enumerate(test['ColCell'][:,0]):
            ColCell[i]=str(test['ColCell'][i][0][0])
        for i,j in enumerate(test['ColUnits'][0,:]):
            if len(test['ColUnits'][0,i])>0:
                ColUnits[i]=str(test['ColUnits'][0,i][0])
            else:
                ColUnits[i]=' '
    return cat_onlycone,ColCell, ColUnits
def search_sortedlat(Cat,Long,Lat,Radius):
    """Find sources within a small circle in a catalog sorted by latitude.
    Input  :- Cat: 2D array with longitude in column 0 and latitude in column 1
               [radians], sorted in ascending latitude.
            - Long, Lat: cone center [radians].
            - Radius: cone radius [radians].
    Output :- row indices (shape (k, 1), float-valued) of Cat entries inside the cone.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) Oct 2020"""
    n_rows = np.shape(Cat)[0]
    # Restrict the search to the latitude band [Lat-Radius, Lat+Radius].
    band = mfind_bin(Cat[:, 1], [Lat - Radius, Lat + Radius])
    low = float(band[0])
    high = min(n_rows, float(band[1] + 1))  # +1 compensates for how mfind_bin reports bins
    rows = slice(int(low - 1), int(high))
    dist = celestial.sphere_dist_fast(Long, Lat, Cat[rows, 0], Cat[rows, 1])[0]
    # Shift band-relative hit positions back to absolute row indices.
    return low - 1 + np.argwhere(dist <= Radius)
def sources_match(CatName,Cat,SearchRadius_arcs=2,catalog_dir='./data'):
    """Match each source of an input catalog against a local HDF5/HTM catalog.
    Input  :- CatName: catalog base name (e.g., 'GAIADR1').
            - Cat: 2D array of sources; column 0 = R.A. [radians], column 1 = Dec. [radians].
            - Optional: SearchRadius_arcs - match radius [arcsec]. Default is 2 arcsec.
            - Optional: catalog_dir - root directory of the HDF5 catalogs.
    Output :- CatM: dict with keys 'Match' (nearest-match rows, NaN when none),
              'Dist' (distance to nearest match [radians]), 'Nmatch' (matches per
              source) and 'ColCell' (column names of the matched catalog).
            - Cat: (Nsrc, Ncol+2) array: matched row + [Nmatch, Distmin] appended.
              NOTE: this rebinds (shadows) the input parameter `Cat`.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) Oct 2020"""
    Rad = 180. / math.pi
    SearchRadius_rad=SearchRadius_arcs/(Rad*3600) #converts arcsec radius into radians radius
    Ra=Cat[:,0] # in rad!
    Dec=Cat[:,1] # in rad!
    # One cone search around the median position, wide enough to cover all sources.
    MedRa=np.nanmedian(Ra)
    MedDec=np.nanmedian(Dec)
    D=celestial.sphere_dist_fast(MedRa,MedDec,Ra,Dec)[0]
    Radiusi=np.max(D)*(1+10*np.spacing(1))  # small epsilon so the farthest source is included
    Radius=Radiusi*Rad*3600 #converts to arcsec
    CatHunsorted,ColCelH,ColUnitsH=cone_search(CatName,MedRa,MedDec,Radius,catalogs_dir=catalog_dir)
    if CatHunsorted.ndim<2:
        CatH = CatHunsorted  # empty or single-row result: nothing to sort
    else:
        CatH=CatHunsorted[np.argsort(CatHunsorted[:, 1])]#sort by declination, as required by search_sortedlat
    Nsrc = np.shape(Cat)[0]
    # Pre-fill outputs with NaN; rows stay NaN for sources with no match.
    CatM={}
    CatM['Match'] = np.empty((Nsrc,len(ColCelH)))
    Cat=np.empty((Nsrc,len(ColCelH)+2))
    Cat[:,:]=np.nan
    CatM['Match'][:,:]=np.nan
    CatM['Dist'] = np.empty((Nsrc),dtype=object)
    CatM['Dist'][:]=np.nan
    CatM['Nmatch'] = np.zeros((Nsrc, 1))
    if (len(CatH)!=0):
        for Isrc in range(Nsrc):
            Ind = search_sortedlat(CatH, Ra[Isrc], Dec[Isrc], SearchRadius_rad).astype(int)
            if (len(Ind)!=0):
                Dist = celestial.sphere_dist_fast(Ra[Isrc], Dec[Isrc], CatH[Ind, 0],CatH[Ind, 1])[0]
                Distmin=Dist
                Nmatch = len(Ind)
                if (Nmatch > 1):
                    # Several candidates: keep only the nearest one.
                    Distmin = np.min(Dist)
                    MinInd=np.argmin(Dist)
                    Ind = Ind[MinInd]
                CatM['Match'][Isrc,:] = CatH[Ind,:]
                CatM['Dist'][Isrc] = Distmin
                CatM['Nmatch'][Isrc] = Nmatch
                # Matched row plus [..., Nmatch, Distmin] in the flat output array.
                Cat[Isrc,:-2]=CatH[Ind,:]
                Cat[Isrc,-1]=Distmin
                Cat[Isrc,-2]=Nmatch
    CatM['ColCell'] = ColCelH
    return CatM, Cat
def search_htm_ind(Filename,Long,Lat,Radius,path,VarName=None,CatDir=None,verbose=False):
    """Wrapper of htm_search_cone that keeps, from the vector returned by
    htm_search_cone, only the trixels that actually contain sources.

    The index-file content is cached in the module-level dict `d` under the keys
    '<VarName>_name' / '<VarName>_array' so repeated calls (e.g. in a loop) do
    not re-read the same HDF5 index file.

    Input  :- Filename: name of the index file, e.g. 'FIRST_htm.hdf5'.
            - Long, Lat: cone center [radians].
            - Radius: cone radius [radians].
            - path: root directory of the HDF5 catalogs.
            - Optional: VarName - dataset key inside the index file;
              default None derives '<CatName>_HTM' from Filename.
            - Optional: CatDir - catalog sub-directory; verbose - verbosity flag.
    Output :- numpy array of trixel IDs intercepting the cone and holding sources.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) Feb 2018
    """
    if VarName is None:  # was `VarName==None`; identity check is the correct idiom
        cat_name=Filename.split('_')[0]
        VarName=cat_name+'_HTM'
    # Cache lookup by key. The previous membership test scanned d.values(),
    # which compared the string VarName against cached numpy arrays.
    cache_key = str(VarName) + '_array'
    if cache_key not in d:
        if verbose:
            print('I have not seen the index file corresponding to {0} yet'.format(VarName))
        DataHTM_indexfile = class_HDF5.HDF5(path + '/' + CatDir + '/' + Filename).load(VarName,
                                                                                      numpy_array=True) # load the indexfile content
        d[str(VarName)+'_name']=VarName
        d[cache_key]= DataHTM_indexfile
    else:
        if verbose:
            print('I have already loaded the index file corresponding to {0}'.format(VarName))
        DataHTM_indexfile = d[cache_key]
    ID=htm_search_cone(DataHTM_indexfile,Long,Lat,Radius) # IDs of all trixels intercepting the cone
    ID_array=np.array(ID)
    # Row 12 of the index array holds the per-trixel source count; keep trixels with sources.
    ID_w_sources=ID_array[DataHTM_indexfile[12,ID]>0]
    return ID_w_sources
def htm_search_cone(IndexFile_data,Long,Lat,Radius,Ind=None,Son_index=np.arange(2,6),PolesLong_index=np.arange(6,11,2),PolesLat_index=np.arange(7,12,2)):
    """Recursively search for all HTM leaves intersecting a small circle.
    Input :-IndexFile_data: 2D HTM index array (or open HDF5 object); rows 2-5 hold
             son indices, rows 6/8/10 pole longitudes, rows 7/9/11 pole latitudes.
           -Long: longitude to search [radians].
           -Lat: latitude to search [radians].
           -Radius: radius of the small circle [radians].
           -Ind: trixel indices to examine at this recursion level; None means
             start from the 8 root trixels.
           -Son_index/PolesLong_index/PolesLat_index: row positions in the index
             array (np.arange defaults are read-only here, so the shared-default
             pitfall does not bite — but do not mutate them).
    Output : list of index-array positions of the smallest leaves intercepting the cone.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) Feb 2018
    """
    if Ind is None:
        Sons=np.arange(8)  # top of the tree: the 8 root trixels
    else:
        Sons=Ind.astype(int)
    ID=[]
    Nsons=len(Sons)
    # One column per son: the 3 pole longitudes / latitudes of that trixel.
    PolesLong=np.zeros((3,Nsons))
    PolesLat=np.zeros((3, Nsons))
    for i in range(Nsons):#OPTIMIZE
        PolesLong[:,i]=IndexFile_data[PolesLong_index[:],Sons[i]]
        PolesLat[:,i]=IndexFile_data[PolesLat_index[:],Sons[i]]
    Flag=celestial.cone_in_polysphere(PolesLong,PolesLat,Long,Lat,Radius) #does the cone intercept each son trixel?
    for i in range(Nsons): #OPTIMIZABLE?
        if Flag[i]==1: #i.e. the cone overlaps the son with index i
            if np.isnan(IndexFile_data[Son_index[:],Sons[i]]).all()==True:# NaN son entries mark a leaf: the data lives here, recursion stops
                ID.append(Sons[i])
            else:
                Ind = IndexFile_data[Son_index[:], Sons[i]] - 1.  # stored 1-based; convert to 0-based
                # Recurse into the intercepted son's children.
                ID.extend(htm_search_cone(IndexFile_data,Long,Lat,Radius,Ind=Ind))
    return ID
def get_index_filename(CatName):
    """Return the index-file name and index dataset key for a catalog base name.
    Input  :- CatName: catalog base name (e.g. 'PS1').
    Output :- '<CatBaseName>_htm.hdf5' — the index file name (note: written as
              <CatBaseName>_htm_ind.hdf5 in the paper), e.g. 'PS1_htm.hdf5'.
            - '<CatBaseName>_HTM' — the key of the HDF5 dataset holding the
              two-column index, e.g. 'PS1_HTM'.
    example: [IndexFileName,IndexVarName]=catsHTM.get_index_filename('PS1')
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    return CatName + '_htm.hdf5', CatName + '_HTM'
def load_HTM_ind(Filename,VarName,catalogs_dir='./data',CatDir=None):
    """Load the catalog's HTM index file into a list of per-trixel dictionaries.
    Input :- Filename: the HDF5 index file (one per catalog). It stores a 2D array
             with one column per trixel (trixel index = column index + 1: index 1
             is in column 0) and 13 rows:
             [level, father index, son1..son4 index, Pole1 long, Pole1 lat,
              Pole2 long, Pole2 lat, Pole3 long, Pole3 lat, Nsrc-or-NaN].
           - VarName: name of the dataset holding that 2D array. Default is '<CatName>_HTM'.
           - catalogs_dir, CatDir: path components of the catalog directory.
    Output :- HTM_list: list of N_trixels dicts with keys 'level', 'father', 'son',
              'PolesCoo' (3x2 array, one [long, lat] row per pole) and 'Nsrc'.
            - Data: the raw 2D index array.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    Data=class_HDF5.HDF5(catalogs_dir + '/' + CatDir + '/' +Filename).load(VarName,numpy_array=True)#as many columns as trixels, 13 lines (see docstring)
    N_trixels=np.shape(Data)[1]
    HTM_list=[]#will end up being a list of N_trixels dictionaries
    for i in range(N_trixels):
        trixel = dict()
        trixel['level']=Data[0,i]#line 1 of column i
        if np.isnan(np.array(Data[1,i])).all() == True:
            trixel['father']=[]  # NaN father marks a root trixel
        else:
            trixel['father']=Data[1,i]
        if np.isnan(np.array(Data[2,i])).all() == True:
            trixel['son']=[]  # NaN sons mark a leaf trixel
        else:
            trixel['son']=Data[2:6,i]
        trixel['PolesCoo'] = np.zeros((3, 2))
        trixel['PolesCoo'][0, 0] = Data[6,i]
        trixel['PolesCoo'][0, 1] = Data[7,i]
        trixel['PolesCoo'][1, 0] = Data[8,i]
        trixel['PolesCoo'][1, 1] = Data[9,i]
        trixel['PolesCoo'][2, 0] = Data[10,i]
        trixel['PolesCoo'][2, 1] = Data[11,i]
        trixel['Nsrc']=Data[12,i]
        HTM_list.append(trixel)
    return HTM_list,Data
def load_colcell(CatDir,CatName):
    """Load the column names and units of a catalog from its '<CatName>_htmColCell.mat' file.
    Input :- CatDir: directory holding the catalog's .mat file.
           - CatName: catalog base name.
    Output :- ColCell: object array of column names.
            - ColUnits: object array of column units (' ' when missing).
    The 'ColCell' cell array may be stored row-wise or column-wise depending on
    the catalog, hence the orientation branches below.
    By : Maayane Soumagnac (original Matlab function by Eran Ofek)"""
    ColCelFile = CatDir+'/'+CatName + '_htmColCell.mat'
    test = sio.loadmat(ColCelFile)
    # Ncol = the larger of the two dimensions of the stored cell array.
    if np.shape(test['ColCell'])[1] < np.shape(test['ColCell'])[0]:
        Ncol = np.shape(test['ColCell'])[0]
    else:
        Ncol = np.shape(test['ColCell'])[1]
    ColCell = np.empty((Ncol), dtype=object)
    ColUnits = np.empty((Ncol), dtype=object)
    if np.shape(test['ColCell'])[1] < np.shape(test['ColCell'])[0]:
        # Column-wise storage (e.g. Cosmos, TMASSxsc).
        Ncol = np.shape(test['ColCell'])[0]
        for i, j in enumerate(test['ColCell'][:, 0]):
            ColCell[i] = str(test['ColCell'][i][0][0])
        # NOTE(review): ColUnits is still read row-wise ([0, :]) in this branch —
        # confirm that ColUnits is always stored row-wise even when ColCell is not.
        for i, j in enumerate(test['ColUnits'][0, :]):
            if len(test['ColUnits'][0, i]) > 0:
                ColUnits[i] = str(test['ColUnits'][0, i][0])
            else:
                ColUnits[i] = ' '
    else:
        # Row-wise storage (the common case).
        Ncol = np.shape(test['ColCell'])[1]
        for i, j in enumerate(test['ColCell'][0, :]):
            ColCell[i] = str(test['ColCell'][0, i][0])
        for i, j in enumerate(test['ColUnits'][0, :]):
            if len(test['ColUnits'][0, i]) > 0:
                ColUnits[i] = str(test['ColUnits'][0, i][0])
            else:
                ColUnits[i] = ' '
    return ColCell, ColUnits
def load_trix_by_ind(CatName,index,SearchParValue=None,num=100,catalogs_dir='./data',Ncol=None,Verbose=True):#load_cat in Eran's library
    """Description: given a catalog basename and the index of a trixel, load the content of the corresponding trixel dataset to a numpy array
    Input :- CatName: catalog basename
            - index: trixel index (int), or a dataset name (str).
              NOTE(review): when a dataset-name string is given, Filename and
              Data_set_name are never assigned below, so the function raises
              NameError - the string branch looks unfinished; confirm intent.
            - SearchParValue: a two element vector of lower and upper value. Only lines in which the sorted parameter is between the low and high value will be retrieved.
              If empty, retrieve all lines. Default is empty.
            - num: number of datasets per HDF5 file (default 100)
            - catalogs_dir: directory where the HDF5 catalogs are stored
            - Ncol: number of columns in the catalog (required when SearchParValue is given)
            - Verbose: print progress information
    Output :- a numpy array with the content of the trixel
            - Ind: index information (1 when the whole trixel was loaded; None when the index dataset was empty)
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    if isinstance(index,str)==False:
        # integer trixel index: derive the HDF5 file name and dataset name
        names=get_file_dataset_from_trixel_id(CatName,index,NfilesinHDF=num,Verbose=Verbose)
        Filename=names[0]
        Data_set_name=names[1]
    CatDir=get_CatDir(CatName)
    if SearchParValue is None:
        # no search range: load the full trixel dataset (transposed so rows are sources)
        trixel_data=class_HDF5.HDF5(catalogs_dir + '/'+ CatDir + '/' + Filename).load(Data_set_name, numpy_array=True).T
        Ind=1
    else:
        #load the index file
        VarIndStr=Data_set_name+'_Ind' #the name of the index dataset
        if Verbose==True:
            print('Filename is',Filename)
        DataInd=class_HDF5.HDF5(catalogs_dir+'/'+CatDir+'/'+Filename).load(VarIndStr,numpy_array=True,Verbose=Verbose).T#the content of the index dataset
        if len(DataInd)>0:
            Ndi=np.shape(DataInd)[0]
            # binary-search the index rows bracketing the requested range of the sorted parameter
            I1=bin_sear(DataInd[:,1],SearchParValue[0])
            I2=bin_sear(DataInd[:,1],SearchParValue[1])
            #print('before the if, I1 is {0} and I2 is {1}'.format(I1,I2))
            Ind=DataInd[I1,0] #the
            # offset of the first line to read from the trixel dataset
            Offset=np.append(DataInd[I1,0]-1,0)
            if I1==I2:
                # degenerate range: widen by one index row, clamped to the last row
                I2=I2+1
                I2=min(I2,Ndi-1)
            # Block = [number of lines to read, number of columns]
            Block=[1+DataInd[I2,0]-DataInd[I1,0],Ncol]
            #print('Block is',Block)
            trixel_data=class_HDF5.HDF5(catalogs_dir+'/'+CatDir+'/'+Filename).load(Data_set_name,Offset=Offset,Block=Block,numpy_array=True,Verbose=Verbose).T
            #seach the indexes of the
        else:
            # empty index dataset: nothing to load
            trixel_data=np.array([])
            Ind=None
    return trixel_data,Ind
def bin_sear(X, Val):  # Util.find of Eran
    """Description: binary search for the element of a sorted vector closest to a given value.
    Input :- X: vector sorted in ascending order
            - Val: value to search for
    Output :- 0-based index of the element of X closest to Val
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    N = len(X)
    if N == 1:
        # A single-element vector only has index 0, which is the closest by
        # definition. (The original returned 1 here - a leftover of Matlab's
        # 1-based indexing - which is out of range for a length-1 vector and
        # made callers such as load_trix_by_ind crash on DataInd[I1,0].)
        IndVal = 0
    else:
        Ind1 = 0
        Ind2 = N - 1
        IndM = math.floor(0.5 * N)
        Y1 = X[Ind1]
        Y2 = X[Ind2]
        Ym = X[IndM]
        Found = 0
        while Found == 0:
            if Val > Ym:
                # search in the upper half
                Ind1 = IndM
                Y1 = X[Ind1]
                if Ind2 - Ind1 >= 2:
                    IndM = math.floor(0.5 * (Ind2 + Ind1))
                else:
                    # interval narrowed to two neighbours: pick the closest
                    Found = 1
                    if abs(Val - Y1) < abs(Val - Y2):
                        IndVal = Ind1
                    else:
                        IndVal = Ind2
                Ym = X[IndM]
            elif Val < Ym:
                # search in the lower half
                Ind2 = IndM
                Y2 = X[Ind2]
                if Ind2 - Ind1 >= 2:
                    IndM = math.floor(0.5 * (Ind1 + Ind2))
                else:
                    Found = 1
                    if abs(Val - Y1) < abs(Val - Y2):
                        IndVal = Ind1
                    else:
                        IndVal = Ind2
                Ym = X[IndM]
            else:
                # exact match on the midpoint
                Found = 1
                IndVal = IndM
    return IndVal
def mfind_bin(X, Vals):
    """Description: vectorised binary search - bisect the sorted vector X
    simultaneously for every value in Vals.
    Input :- X: sorted column vector (ascending)
            - Vals: vector of values to search for
    Output :- array of indices of nearby values, one per entry of Vals.
              NOTE: the indices follow the 1-based convention of the original
              Matlab code (X is probed as X[Im - 1]); per the original remark,
              add 1 to an index to guarantee the found value is larger than
              the searched one.
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    n_vals = len(Vals)
    n = len(X)
    # per-value lower/upper search bounds (1-based, held as floats)
    lower = np.ones(n_vals)
    upper = n * np.ones(n_vals)
    mid = np.floor(0.5 * (lower + upper)).astype(int)
    prev_mid = np.zeros(n_vals).astype(int)
    # degenerate inputs: empty X -> empty list, single-element X -> all ones
    if np.shape(X)[0] < 2:
        if X.size == 0:
            mid = []
        else:
            mid = np.ones(n_vals).astype(int)
        return mid
    # bisect every value at once until no midpoint moves any more
    while not np.all(mid == prev_mid):
        is_above = Vals > X[mid - 1]
        is_below = np.invert(is_above)
        lower[is_above] = mid[is_above]
        upper[is_below] = mid[is_below]
        prev_mid = mid
        mid = np.floor(0.5 * (lower + upper)).astype(int)
    return mid
def get_file_dataset_from_trixel_id(CatName,index,NfilesinHDF,Verbose=True):#get_file_var_from_htmid in Eran's library
    """Description: build the HDF5 file name and dataset name that hold a given trixel,
    from the catalog basename, the trixel index and the number of datasets per file.
    Input :- CatName: catalog basename
            - index: trixel index
            - NfilesinHDF: number of datasets in an HDF5 file (default is 100)
    Output :- Filename: name of the HDF5 file where the trixel dataset is stored
            - DatasetName: name of the trixel dataset inside that file
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    if Verbose==True:
        print('index is',index)
    # round the trixel index down to the first index stored in its file
    first_index_in_file = math.floor(index / NfilesinHDF) * NfilesinHDF
    dataset_name = 'htm_%06d' % index
    file_name = '%s_htm_%06d.hdf5' % (CatName, first_index_in_file)
    return file_name, dataset_name
def Number_of_trixels(Catname,catalogs_dir='./data',CatDir=None):
    """Description: finds the number of trixels for a given catalog
    Input :- Catname: catalog basename
            - catalogs_dir: directory where the HDF5 catalogs are stored
            - CatDir: optional catalog sub-directory
    Output :- number of trixels for this catalog
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    # Fetch the index file name and its dataset name with a single call
    # (the original called get_index_filename twice for the same tuple).
    index_names = get_index_filename(Catname)
    IndexFileName = index_names[0]  # name of the index file associated with Catname
    IndexVarName = index_names[1]   # name of the dataset containing the index file content
    List_of_dict = load_HTM_ind(IndexFileName, IndexVarName, catalogs_dir=catalogs_dir, CatDir=CatDir)[0]
    return len(List_of_dict)
def simplify_list(val):
    """Recursively unwrap nested single-element lists.
    Examples: [[3]] -> 3, [[1, 2]] -> [1, 2], 'a' -> 'a'.
    Non-lists and lists with zero or more than one element are returned
    unchanged (the original raised IndexError on an empty list, because it
    took val[0] whenever len(val) <= 1).
    """
    if not isinstance(val, list):
        return val
    if len(val) == 1:
        return simplify_list(val[0])
    # empty or multi-element list: nothing to unwrap
    return val
def simplify2(x):
    """Flatten one level of nesting: every element of x that is itself a
    list/tuple/ndarray contributes its items, every other element is kept
    as-is. Returns a plain list.
    """
    flattened = []
    for element in x:
        if isinstance(element, (list, tuple, np.ndarray)):
            flattened.extend(element)
        else:
            flattened.append(element)
    return flattened
def simplify3(x):
    """Fully flatten a nested list/tuple/ndarray structure by repeatedly
    applying simplify2 (which flattens one nesting level) until the first
    element is no longer a container.
    Note: only the first element is inspected, so a mixed sequence such as
    [1, [2]] is returned as-is; this mirrors the original behaviour.
    Returns x unchanged when it is empty (the original raised IndexError on
    empty input, because it unconditionally inspected x[0]).
    """
    if len(x) == 0:
        # guard: nothing to flatten, and x[0] below would raise IndexError
        return x
    if isinstance(x[0], (list, tuple, np.ndarray)) == False:
        return x
    flattened_once = simplify2(x)
    return simplify3(flattened_once)
def match_cats(Cat,Refcat,Radius=2,RadiusUnits='arcsec'):
    """Description: translation of VO.search.match_cats of Eran. Given two spherical coordinate catalogs. - for each entry
    in the reference catalog (second input argument), search for all nearby sources in the catalog (first input).
    Input :- A catalog sorted by declination. Ra and Dec in Rad
            - A reference catalog. Ra and Dec in rad
            - 'Radius' - Search radius. This is either a scalar or a vector which length is identical to that of the reference
            catalog (second input). If a vector than each source in the reference catalog may have a different search radius.
            Default is 2 (arcsec).
            - 'RadiusUnits' - Search radius units. See convert.angular for options. Default is 'arcsec'.
    Output :-Vec: a dictionnary with the following keys
            Vec['Nfound']= A vector, the size of RefCat, with the number of sources found in the catalog Cat that are within the search radius from the source with same indice in refcat. in the reference catalog.
            Vec['MinDist']=A vector, the size of RefCat, with the minimum distance (radians) of matched sources in Cat to the source of same indice in RefCat. NaN if not found.
            - Res: a list of dictionnaries (one item per *matched* refernce source! this list is not the size of cat1, it is the size of the
            number of objects in cat1 that DO have at least one cross-matched object in cat2):
            Res['IndRef']=Index of source in reference catalog.
            Res['IndCat']=List of indices in the catalog that are matched to
            % the 'IndRef' source of the reference catalog.
            Res['Dist']= Vecor of angular distances (radians) for each one
            % of the sources indicated in 'IndCat'.
            Res['Num']=Number of sources within search radius
            - IndCatMinDist: vector, the size of Refcat, with the indice of the cat2 nearest sources to the cat1 source of indice Res[Indref]. NaN if no source was found
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018"""
    #print('I am running matchcats')
    if RadiusUnits=='rad':
        Radius=Radius  # already in radians: no conversion needed (no-op kept from the Matlab port)
    if RadiusUnits=='arcsec':
        Radius=math.pi*Radius/(180.*3600.)  # arcsec -> radians
    Ncat=np.shape(Cat)[0]
    #print('Ncat is',Ncat)#ok
    #print('Refcat is',Refcat)
    Nref=np.shape(Refcat)[0]
    #print('Nref is', Nref)#ok
    # one search radius per reference source (a scalar input is broadcast)
    Radius=Radius*np.ones(Nref)
    Res=[]
    # bracket, in the Dec-sorted catalog, the Dec interval of each reference source
    # (mfind_bin returns 1-based indices, hence the Cat[Icat-1,...] accesses below)
    Iuppx=mfind_bin(Cat[:,1],Refcat[:,1]+Radius) #only if second column is dec!
    Ilowx=mfind_bin(Cat[:,1],Refcat[:,1]-Radius) #only if second column is dec!
    #print('Iupx is',Iuppx)#ok
    #print('Ilowx is',Ilowx)#ok
    # clamp the brackets to [1, Ncat] (still 1-based)
    Ilow=np.zeros(np.shape(Ilowx)[0])
    for r,s in enumerate(Ilowx):
        Ilow[r]=max(1,Ilowx[r])
    #Ilow=np.max(1,Ilowx)
    Iupp=np.zeros(np.shape(Iuppx)[0])
    for r,s in enumerate(Iuppx):
        Iupp[r]=min(Ncat,Iuppx[r]+1)
    #print('Iup is',Iupp)#ok
    #print('Ilow is',Ilow)#ok
    # number of candidate catalog rows per reference source; keep only sources with candidates
    Ncand=Iupp-Ilow
    Ic=np.array(np.where(Ncand>=1))[0]
    #print('Ic is',Ic)
    #print(np.shape(Ic))
    #print('Ic is',Ic)#index where condition verified, same as matlab one -1
    Nc=np.shape(Ic)[0]
    #print('Nc is',Nc)
    #pdb.set_trace()
    Vec=dict()
    Vec['Nfound']=np.zeros(Nref)
    #vectornan=np.empty(Nref)
    #vectornan[:]=np.nan
    Vec['MinDist']=np.full(Nref, np.nan)#vectornan
    Vec['MinPa']=np.full(Nref, np.nan)#vectornan  # NOTE(review): allocated but never filled in this function
    K=0
    IndCatMinDist=np.full(Nref, np.nan)#vectornan
    for Icr in range(Nc):
        #print("Vec['MinDist']5 is", Vec['MinDist'])
        #print('Nc is',Nc)
        Iref=Ic[Icr]
        #print('Iref is',Iref)#ok
        #pdb.set_trace()
        #print('Iref, type:{0},{1}'.format(Iref,type(Iref)))
        #print('Ilow[Iref], type:{0},{1}'.format(Ilow[Iref], type(Ilow[Iref])))
        #print('Iup[Iref], type:{0},{1}'.format(Iupp[Iref], type(Iupp[Iref])))
        #print('Iupp[Iref]-Ilow[Iref]+1, type:{0},{1}'.format(Iupp[Iref]-Ilow[Iref]+1, type(Iupp[Iref]-Ilow[Iref]+1)))
        #print(Ilow[Iref].is_integer())
        if (Ilow[Iref].is_integer()==False) | (Iupp[Iref].is_integer()==False) :
            raise Exception('Ilow[Iref] and Iupp[Iref] should be integers')
        # candidate rows of Cat (1-based) whose Dec may fall within the search radius
        Icat=np.linspace(int(Ilow[Iref]),int(Iupp[Iref]),int(Iupp[Iref]-Ilow[Iref]+1)).astype(int)
        #print('Icat is',Icat)#ok
        #print('Cat[Icat-1,0] is',Cat[Icat-1,0])#ok
        #print('Cat[Icat-1,1] is',Cat[Icat-1,1])#ok
        #print('Refcat[Iref,0]',Refcat[Iref,0])#ok
        #print( 'Refcat[Iref,1]) is',Refcat[Iref,1])#ok
        # angular distance (radians) of each candidate to the reference source
        Dist=celestial.sphere_dist_fast(Cat[Icat-1,0],Cat[Icat-1,1],Refcat[Iref,0],Refcat[Iref,1])[0]
        #print('In matchcats, Dist is',Dist)
        #print('Radius[Iref] is',Radius[Iref])
        IndRelative=np.where(Dist<=Radius[Iref])[0]
        # NOTE(review): the base index here is Ilow[Icr], while Ilow is indexed
        # by Iref (=Ic[Icr]) everywhere else in this loop - confirm that Icr
        # vs Iref is intentional and not a transcription slip.
        IndCat=Ilow[Icr]-1+IndRelative
        #print('IndRelative is',IndRelative)#ok
        #print('IndCat is',IndCat)#ok
        Vec['Nfound'][Iref]=np.shape(IndCat)[0]#ok
        #print("Vec['Nfound'][Iref] is",Vec['Nfound'][Iref])#ok
        #pdb.set_trace()
        if Vec['Nfound'][Iref]>0:
            # at least one match: record the nearest one and the full match list
            Vec['MinDist'][Iref]=np.min(Dist[IndRelative])
            MinInd=np.argmin(Dist[IndRelative])
            Resi=dict()
            K=K+1
            Resi['IndCat']=IndCat
            Resi['IndRef']=Iref
            Resi['Num']=np.shape(IndCat)[0]
            Resi['Dist']=Dist[IndRelative]
            #print("Resi['Dist']",Resi['Dist'])
            Res.append(Resi)
            #print("Vec['MinDist'] 1.5 is", Vec['MinDist'])
            IndCatMinDist[Iref]=IndCat[MinInd]
            ##print('IndCatMinDist[Iref] is {0} and p.min(Dist[IndRelative]) is {1}'.format(IndCatMinDist[Iref],np.min(Dist[IndRelative])) )
            # #print("Vec['MinDist'] 1.8 is", Vec['MinDist'])# ca met IndCatMinDist[Iref] dans Vec['MinDist'][Iref]
            # print("Vec['MinDist'] 2 is", Vec['MinDist'])
    #print("Vec['MinDist'] 3 is", Vec['MinDist'])
    #pdb.set_trace()
    #print("Vec['MinDist'] 4 is", Vec['MinDist'])
    #pdb.set_trace()
    #print('In matchcat, Ind is',Res)
    return Vec,Res,IndCatMinDist #Match,Ind,IndCatMinDist
def Save_cross_matched_catalogs(Cat1,Cat2Matched,output_dir=None):
    """Description: save the outputs of xmatch_2cats, in a directory with
    Input :- Catalog 1 basename
            - Catalog 2 basename
            -Search_radius: default is 2
            -Search_radius_units: default is arcsec
            -QueryFun: function to be applied to the catalog
            -QUeryFunPar: parameters for QueryFun
    Output :
    example:
    By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018
    """
    # NOTE(review): this function is an unimplemented stub. Its entire body is
    # the triple-quoted string below - a no-op expression statement holding a
    # commented-out example callback - so calling it does nothing and returns
    # None; Cat1, Cat2Matched and output_dir are never used.
    '''
    def Example_QueryAllFun(Cat1,Ind,Cat2,IndCatMinDist,i):
        print('I am running Example_QueryAllFun')
        print('Cat1 is',Cat1)
        print('Ind is',Ind)
        print('Cat2 is',Cat2)
        print('IndCatMinDist is',IndCatMinDist)
        np.save("./Cat1_"+str(i)+'.txt',Cat1)
        return Cat1
    '''
def Example_QueryAllFun(Cat1,Ind,Cat2,IndCatMinDist,i,additionnal_args=None):
    # Example QueryAllFun callback for xmatch_2cats: prints the cross-matching
    # inputs/outputs for one trixel of catalog_1, saves Cat1 to a text file
    # (into additionnal_args[0] if provided, else the working directory), then
    # pauses in the pdb debugger so the user can inspect the match.
    print('****** I am running Example_QueryAllFun *******')
    print("Cat1, the content of the catalog_1's trixel is",Cat1)
    print("Cat2, the content of a catalog_2' trixel overlapping with Cat1 is", Cat2)
    print("Ind is a list of dictionnaries, with one dictionnary per Cat1's object having one or more counterparts in Cat2; ")
    print("""Ind[i]["IndRef"]=Index of the Cat1's source having one or more counterpart in Cat2""")
    print("""Ind[i]["IndCat"]=List of indixes of the Cat2's counterparts.""")
    print("""Ind[i]["Dist"]= Vecor of angular distances (radians) between the Cat1's source and its counterparts in Cat2""")
    print('Ind:',Ind)
    print("IndCatMinDist is a vector, with as many elements as lines in Cat1, with 'nan' at lines where there is no counterpart in Cat2, and at line where there is, the catalog_2's index of the closest counterpart")
    print('IndCatMinDist:',IndCatMinDist)
    if additionnal_args is not None:
        # save into the caller-supplied output directory
        np.savetxt(additionnal_args[0]+"/Cat1_"+str(i)+'.txt',Cat1)
    else:
        np.savetxt("./Cat1_" + str(i) + '.txt', Cat1)
    print('***********************************************')
    print('press "c" to continue, "q" to quit')
    # interactive breakpoint: lets the user inspect variables before moving on
    pdb.set_trace()
    return Cat1
def xmatch_2cats(Catname1,Catname2,Search_radius=2,QueryAllFun=None,QueryAllFunPar=None,
catalogs_dir='./data',Verbose=False,save_results=False,save_in_one_file=True,
save_in_separate_files=True,output='./cross-matching_results',time_it=True,Debug=False):
"""Description: cross match two HDF5/HTM catalogs: for each source in the first catalog, the index of the nearest source in the second catalog
(nearest within some specified distance) is saved.
Input :- Catalog 1 basename
- Catalog 2 basename
-Search_radius: default is 2 (in arcsec)
-QueryFun: function to be applied to the catalog
-QUeryFunPar: parameters for QueryFun
-Verbose: set to True if yu want the code to tell you what it is doing at each step and output intermediate outputs
-save_results: if True the the cross-matching pieces of catalog_1 and catalog_2 will be saved. Beware: only on object of catalog 2 (the closest)
is saved per object of catalog 1 having a counterpart.
-save_in_one_file: if True the results will be saved in one file, of which the first columns are of catalog1 (only those for which
cross matching entries in catalog_2 were found), and then come the columns of catalog2
-save_in_two_files: if True the results will be saved in two separate files. One has the entries of catalog_1 having at least one counterpart in catalog2
and the second is the entries of catalog 2 for the closest counterparts of catalog_2
-catalogs_dir: the directory where the HDF5 catalogs are stored
Output : if save_results=True, the cross-matching pieces of catalog_1 and catalog_2 are stored in the output directory given as the "output" key.
example: catsHTM.xmatch_2cats('FIRST','NVSS',Verbose=False,save_in_one_file=True,save_results=True,save_in_separate_files=True)
By : Maayane Soumagnac (original Matlab function by Eran Ofek) August 2018
"""
#Converts search_radius into radians
Rad = 180. / math.pi
Search_radius=Search_radius/(Rad*3600) #converts arcsec radius into radians radius
###### find the max level between the level of each catalog #####
CatDir1=get_CatDir(Catname1) #le catalog 1 sous forme de numpy array
CatDir2=get_CatDir(Catname2) #le catalog 1 sous forme de numpy array
##if Verbose==True:
IndexFileName1 = get_index_filename(Catname1)[0] # name of the index file associated with Catname
IndexVarName1 = get_index_filename(Catname1)[1] # name of the data set containing the index filename content
HTM1 = load_HTM_ind(IndexFileName1, IndexVarName1, catalogs_dir=catalogs_dir, CatDir=CatDir1)[0]#content of the catalog index file into a dictionnary
IndexFileName2 = get_index_filename(Catname2)[0] # name of the index file associated with Catname
IndexVarName2 = get_index_filename(Catname2)[1] # name of the data set containing the index filename content
HTM2 = load_HTM_ind(IndexFileName2, IndexVarName2, catalogs_dir=catalogs_dir,
CatDir=CatDir2)[0]
N_trixels_1=Number_of_trixels(Catname1,catalogs_dir=catalogs_dir,CatDir=CatDir1) # number of trixels in catalog 1
N_trixels_2=Number_of_trixels(Catname2,catalogs_dir=catalogs_dir,CatDir=CatDir2) # number of trixels in catalog 2
#if Verbose==True:
print('Catalog_1 is {0} ({1} trixels)'.format(Catname1,N_trixels_1))
print('Catalog_2 is {0} ({1} trixels)'.format(Catname2, N_trixels_2))
#print('Catalog_2 is', CatDir2)
#print('The number of trixels in {0} is {1}'.format(CatDir1,N_trixels_1))
#print('The number of trixels in {0} is {1}'.format(CatDir2,N_trixels_2))
L1=celestial.number_of_trixels_to_level(N_trixels_1)[0] #number of levels in catalog 1
L2=celestial.number_of_trixels_to_level(N_trixels_2)[0] #number of levels in catalog 2
if Verbose==True:
print('The level of {0} is {1}'.format(Catname1,L1))
print('The level of {0} is {1}'.format(Catname2,L2))
Lmax=max(L1,L2)
if Verbose==True:
print('Lmax is',Lmax)#ok compared with Eran; maximum level between cat1 and cat2
####### Create the list of trixel's indexes associated with each level #########
print('************** I am building all the trixels relevant to our search **************')
built_array = celestial.htm_build(Lmax,Verbose=Verbose)
HTM=built_array[0]
Level=built_array[1] #une liste de Lmax dictionnaires, tels que dic['level']=un nombre designant le level (0 pour le level1) et dic['ptr']=un np array des indices des rixels a ce level
#print(HTM[0].coo())
#pdb.set_trace()
#print('HTM[0] is',HTM[0])#ok compared with eran
#print('HTM[1] is', HTM[1])#ok compared with Eran
#print('HTM[0][coo] is',HTM[0]['coo'])#ok w Eran
#print('HTM[8][coo] is', HTM[8]['coo'])# ok
#print('HTM[9][coo] is', HTM[9]['coo'])#ok
#print('HTM[10920][coo] is',HTM[10920]['coo'])#ok
#('HTM[10920] is',HTM[10920])
#pdb.set_trace()
Level1=Level[L1-1] # le dictionnaire de Level correspondant au level L1: Level1['Level']=L1-1 et Level1['ptr']= le unumpy array des index des trixesls a ce niveau
Level2=Level[L2-1]
if Verbose==True:
print('Level1:',Level1)
print('Level2:',Level2)
Nh1=len(Level1['ptr'])#the number of trixels in the highest level
print('The number of trixels in the highest level, for {0} is {1}'.format(Catname1,Nh1))#ok
#pdb.set_trace()
Nh2=len(Level2['ptr'])
print('The number of trixels in the highest level, for {0} is {1}'.format(Catname2, Nh2)) #ok
#pdb.set_trace()
ColCell2=load_colcell(catalogs_dir+'/'+CatDir2,Catname2)[0]
ColUnits2=load_colcell(catalogs_dir+'/'+CatDir2,Catname2)[1]
Ncol2=np.shape(ColCell2)[0]
ColCell1=load_colcell(catalogs_dir+'/'+CatDir1,Catname1)[0]
ColUnits1=load_colcell(catalogs_dir+'/'+CatDir1,Catname1)[1]
Ncol1=np.shape(ColCell1)[0]
if Verbose==True:
print('{0} has the following fields {1}'.format(CatDir1,ColCell1))
print('in units',ColUnits1)
print('{0} has the following fields {1}'.format(CatDir2, ColCell2))
print('in units', ColUnits2)
#At this stage, we have 2 Level dictionnaries, one per each catalog, such that LevelX['level'] is the number of the highest level (0 for level 1)
# and LevelX['ptr'] is the list of trixels indexes at the highest level
#Next, we go through all the highest level trixels of Catalog 1, and for each trixel, if it contains sources, we check if there are some overlapping trixels in catalog 2
if save_results == True:
if os.path.exists(output):
print('the output directory, ' + output + ' exists already')
else:
os.mkdir(output)
header1 = ",".join([Catname1+':'+ColCell1[i] + ' (' + ColUnits1[i] + ')' for i in range(np.shape(ColCell1)[0])])
header2 = ",".join([Catname2+':'+ColCell2[i] + ' (' + ColUnits2[i] + ')' for i in range(np.shape(ColCell2)[0])])
cross_matching_result = np.empty((1, np.shape(ColCell1)[0] + np.shape(ColCell2)[0]))
#print(np.shape(cross_matching_result))
#print('header1 is',header1)
#print('header2 is', header2)
#print(header1+','+header2)
if save_results==True:
if save_in_one_file==True:
if os.path.exists(output + '/cross-matching_result_full.txt'):
print('the txt file exists already, I am removing it')
os.remove(output + '/cross-matching_result_full.txt')
if save_in_separate_files==True:
if os.path.exists(output + '/cross-matching_result_{0}.txt'.format(Catname1)):
print('the txt file for {0} exists already, I am removing it'.format(Catname1))
os.remove(output + '/cross-matching_result_{0}.txt'.format(Catname1))
if os.path.exists(output + '/cross-matching_result_{0}.txt'.format(Catname2)):
print('the txt file for {0} exists already, I am removing it'.format(Catname2))
os.remove(output + '/cross-matching_result_{0}.txt'.format(Catname2))
#print("Level1['ptr'] is", Level1['ptr'])
#np.savetxt('indexes.txt',Level1['ptr'])
print('************** I am looking for overlapping trixels **************')
start = time.time()
if Debug == True:
print('I will stop at the following indexes, if the trixels exists, to debug, ok? press c to continue',
[Nh1//1000,Nh1//200,Nh1 // 100, Nh1 //10, Nh1 //4, Nh1 //3, Nh1 // 2, Nh1 // 1.5])
pdb.set_trace()
for i in tqdm.tqdm(range(Nh1)): #for each trixels in the highest level of Cat1
#print("Level1['ptr'][Nh1-1] is",Level1['ptr'][Nh1-1])
#print("Level1['ptr'][i] is",Level1['ptr'][i])
index_cat1=Level1['ptr'][i]# takes the index of this trixel and check if this trixel contains sources:
#print('I am looking for Catalog_2 ({0}) trixels overlapping with the trixel #{2} of Catalog_1 ({1})'.format(Catname2,Catname1,index_cat1))
if HTM1[index_cat1-1]['Nsrc']>0:#if the trixel contains sources:
#if index_cat1==27305:
if Verbose==True:
print('I am looking for Catalog_2 ({0}) trixels overlapping with the non-empty trixel #{2} ({3}/{4}) of Catalog_1 ({1})'.format(Catname2, Catname1, index_cat1,i,Nh1))
print('there are {0} sources in this trixel'.format(HTM1[index_cat1-1]['Nsrc']))
#print('not empty')
#print('I am looking for Catalog_2 ({0}) trixels overlapping with the trixel #{2} of Catalog_1 ({1})'.format(Catname2,Catname1,index_cat1))
#print('the file with index {0} has {1} sources'.format(index_cat1,HTM1[index_cat1]['Nsrc']))
#start = time.time()
Cat1=load_trix_by_ind(Catname1,index_cat1,num=100,catalogs_dir=catalogs_dir,Verbose=Verbose)[0]#load the content of that trixel (in the form of a numpy array)
#ongoing1=time.time()
#print(Cat1)#ok
#Cat 1 is a numpy array with the content of a trixel that contains sources, at the highest level of Catalog1
#PolesCoo ok
#print("HTM[index_cat1-1]['coo'] is",HTM[index_cat1-1]['coo'])#ok
MeanRa=np.mean(HTM[index_cat1-1]['coo'][:,0]) # le meam Ra de ce trixel
MeanDec=np.mean(HTM[index_cat1-1]['coo'][:,1]) # le mean Dec de ce trixel
MinDec=np.min(HTM[index_cat1-1]['coo'][:,1])-Search_radius
MaxDec = np.max(HTM[index_cat1 - 1]['coo'][:, 1]) + Search_radius
#print('MeanRa is', MeanRa) #ok
#print('MeanDec is',MeanDec)#ok
D=celestial.sphere_dist_fast(MeanRa,MeanDec,HTM[index_cat1-1]['coo'][:,0],HTM[index_cat1-1]['coo'][:,1])[0]
#print('D is',D)
CircRadius=np.max(D)+Search_radius
#print('CircRadius is',CircRadius)
ID2=celestial.htm_search_cone(HTM2,MeanRa,MeanDec,CircRadius,Ind=[])
#if Verbose==True:
ID2w=simplify3(ID2)
ongoing2 = time.time()
if Verbose==True:
print('there are {0} trixel overlapping with it'.format(len(ID2w)))#ok
#pdb.set_trace()
print('the list of trixels indexes of Catalog_2({0}) overlapping with the trixel #{2} of Catalog_1({1}) is {3}'.format(
Catname2, Catname1, index_cat1,ID2w))
#print('the list without brakets is',ID2w)# a list of indexes of cat2 trixels, which overlap with the cat1 trixel
#load all the data corresponding to ID2w
Nid2=len(ID2w) #the number of trixels of cat 2 overlapping with the given trixel of cat1 which we are examining.
for s in range(Nid2):#for all trixels of catalog 2 overlapping with the given trixel of catalog1
if s==0:
[Cat2,Ind2]=load_trix_by_ind(Catname2,ID2w[s],[MinDec,MaxDec],catalogs_dir=catalogs_dir,Ncol=Ncol2,Verbose=Verbose)
N2=np.shape(Cat2)[0]
#Cat2ID=np.array(list(zip(ID2w[i]*np.ones(N2),Ind2+np.array(range(N2)))))#MAYBE Ind2-1?
#print('len(Cat2) after i=0 is',len(Cat2))
#pdb.set_trace()
else:
if Verbose==True:
print('**********')
print("(catalog_2) {0}'s trixel (overlapping with (catalog_1) {1}'s trixel) of index {2}:".format(Catname2,Catname1,index_cat1))
[Cat2tmp,Ind2]=load_trix_by_ind(Catname2,ID2w[s],[MinDec,MaxDec],catalogs_dir=catalogs_dir,Ncol=Ncol2,Verbose=Verbose)
#print('i={0},shape(Cat2) and shape(Cat2tmp) are {1} and {2}'.format(i,np.shape(Cat2),np.shape(Cat2tmp)))
#pdb.set_trace()
#ongoing3 = time.time()
if len(Cat2)>0:
#print('at this (1) stage len(Cat2) is',len(Cat2))
#print('Cat2tmp (1) is',Cat2tmp)
if len(Cat2tmp)>0:
Cat2=np.vstack((Cat2,Cat2tmp))
N2 = np.shape(Cat2)[0]
#else:
#Cat2ID=np.vstack((Cat2ID,np.array(list(zip(ID2w[i]*np.ones(N2),Ind2+np.array(range(N2)))))))#MAYBE Ind2-1?
#else: Cat2 reste tel quel
else:#si Cat2 etait vide
#print('at this (2) stage len(Cat2) is',len(Cat2))
#print('Cat2 was empty?')
if len(Cat2tmp)>0:#si Cat2tmp n'est pas vide, Cat2 devient lui
#print('Cat2tnp.argwhere(np.isnan(x))mp (2) is',Cat2tmp)
Cat2=np.copy(Cat2tmp)
N2 = np.shape(Cat2)[0]
#print('Cat 2 is', Cat2)
#pdb.set_trace()
#Cat2ID=np.vstack((Cat2ID,np.array(list(zip(ID2w[i]*np.ones(N2),Ind2+np.array(range(N2)))))))#MAYBE Ind2-1?
#else: Cat2 reste vide
#print('Cat2 is',Cat2)
#print('len(Cat2) is',len(Cat2))
#print('np.shape(Cat1) is',np.shape(Cat1))
#print('np.shape(Cat2) is', np.shape(Cat2))
#ongoing4 = time.time()
# C'est quoi Cat2? Cat2 is a catalog with the content of *all the Catalogue 2 trixels overlapping with the given trixel of cat1
# C'est quoi Cat2ID?*
#print('Cat2 before sorting is',Cat2)
#print('Cat2[:, 1] is',Cat2[:,1] )
#pdb.set_trace()
#print('len(Cat2) after the loop is',len(Cat2))
#pdb.set_trace()
if len(Cat2)>0:
cat2=Cat2[Cat2[:, 1].argsort(),] #cat2 est Cat2 -l'ensemble des trixels qui overlappent cat1 -trié par Dec croissant. On a besoin de ca pour applyer match_cats.
#np.savetxt('cat2.txt', cat2)
#SI=Cat2[:, 1].argsort() #SI est les indexes de Dec croissants de Cat2
#print('SI is',SI)# ok, verifie avec matlab
#probleme: cat 2 c est toutes les sources des overlapping trixels. Nous on veut que les sources reelelemt overlapping. donc on run match_cat
#ongoing5 = time.time()
[Match,Ind,IndCatMinDist]=match_cats(cat2,Cat1,Radius=Search_radius,RadiusUnits='rad')
#print('Ind',Ind)
#pdb.set_trace()
if QueryAllFun is not None:
if Verbose==True:
print('I am applying a function on the cross-match result!')
#if i==0:
# Data=np.array([])
#else:
Data = QueryAllFun(Cat1, Ind, cat2, IndCatMinDist, i, additionnal_args=QueryAllFunPar)
#CAREFULLLLLLL it s cat2 not Cat2!!!!
#print(Data)
#pdb.set_trace()
#ongoing6 = time.time()
#Match:a dictionnary with the following keys
#Match['Nfound']= a vector, the length of cat1, with the number of sources found in the cat2 that are within the search radius from the source in the reference catalog Cat1.
#Match['MinDist']=a vector, the size of cat1, wiht the Minimum distance (radians) of sources in cat2 to the source in cat1. NaN if not found
#Ind: a list of dictionnaries (as many as sources in Cat1 THAT HAVE CROSS-MTACHED SOURCES in cat2)
# Ind[i]['IndRef']=Indice of source in cat1
# Ind[i]['IndCat']=List of indices in cat2 that are matched to the 'IndRef' source of Cat1.
# Ind[i]['Dist']= Vecor of angular distances (radians) for each one of the sources indicated in 'IndCat'.
# Ind[i]['Num']=Number of sources within search radius
# IndCatMinDist: a vector of indices of cat2 objects which are the closest to the source in cat1. NaN if not found ??
#print("Match['Nfound'] is",Match['Nfound']) #ok, verifie avec matlab
#print("Match['MinDist'] is", Match['MinDist']) #ok, verifie avec matlab
#print("Match['MinPA'] is", Match['MinPa']) #ok, verifie avec matlab
#print("Ind is",Ind)
#print("the Ind['Num'] are:",[Ind[i]['Num'] for i in range(len(Ind))]) # ok
#print("the Ind['IndCat'] are:", [Ind[i]['IndCat'] for i in range(len(Ind))]) # ok, moi=matlab-1, normal
#print("the Ind['IndRef'] are:", [Ind[i]['IndRef'] for i in range(len(Ind))]) # ok, moi=matlab-1, normal
#print("the Ind['Dist'] are:", [Ind[i]['Dist'] for i in range(len(Ind))]) # ok
#pdb.set_trace()
#print('IndCatMinDist is',IndCatMinDist)#ok, moi=matlab-1, normal
#print('the shape of IndCatMinDist is',np.shape(IndCatMinDist)[0]) #ok
""" if (~isempty(InPar.QueryAllFun))
% execute InPar.QueryAllFun
% QueryAllFun(Cat1,Ind,Cat2,varargin)
if (Ih1==Istart)
Data = [];
end
Data = InPar.QueryAllFun(Cat1,Ind,Cat2,IndCatMinDist,InPar.QueryAllFunPar{:},'Data',Data,'Ih1',Ih1,'Nh1',Nh1,'SearchRadius',InPar.SearchRadius);
end"""
IsN=np.isnan(IndCatMinDist)# un tableau de booleans qui est True la ou il y a zero sources cross-matched, et False la ou il y en a
#print('IsN is',IsN)
#print('IsN is',IsN) ok, mais moi c est des True et False et matlab c est des 0 et 1
#print('the shape of IsN is',np.shape(IsN)) ok
IndCatMinDist[IsN]=True #
#if V
#print('IndCatMinDist is now',IndCatMinDist) # un tableau de la taille de cat1 avec : la ou il y a pas de cross-matched dans cat2: 1, et la ou il y en a: l'indice de l'objet de cat2 le plus proche
"""
ceci: pas clair a quoi ca sert dans le code de matlab. Je laisse tomber.
print("Cat2ID is",Cat2ID) #ok mais pas sur qu'il dooivent etre identiques
print("SI[IndCatMinDist.astype(int)] is",SI[IndCatMinDist.astype(int)]) #pas ok
pdb.set_trace()
DataInd=Cat2ID[SI[IndCatMinDist.astype(int)],:]
DataInd[IsN,:]=np.nan
print('DataInd is', DataInd) # pareil que matlab mais pas sur que c est bien
"""
#print("IndCatMinDist.astype(int) is",IndCatMinDist.astype(int))
#print("np.shape(cat2)",np.shape(cat2))
#print("np.shape(IndCatMinDist)",np.shape(IndCatMinDist))
#print("np.shape(IndCatMinDist.astype(int))",np.shape(IndCatMinDist.astype(int)))
#print("cat2[IndCatMinDist.astype(int)-1,:] is",cat2[IndCatMinDist.astype(int)-1,:])
#print('IndCatMinDist.astype(int)-1 is',IndCatMinDist.astype(int)-1)
#print("cat2[IndCatMinDist.astype(int),:] is", cat2[IndCatMinDist.astype(int), :])
#print('IndCatMinDist.astype(int) is', IndCatMinDist.astype(int))
indexes_analog_to_matlab=np.zeros(np.shape(IndCatMinDist))
indexes_analog_to_matlab[IndCatMinDist!=1]=IndCatMinDist[IndCatMinDist!=1]
#THIS CHECK IS CRUCIAL! DON'T ARAISE
#if Verbose==True:
# print('i (or matlab Ih1-1)={0},indexes_analog_to_matlab must be matlab Indcatmindist-1 everywhere, check if this is the case: {1}'.format(i,indexes_analog_to_matlab))#ok
#
Cat2matched = cat2[indexes_analog_to_matlab.astype(int), :]#ok
#Cat2matched=cat2[IndCatMinDist.astype(int),:]
#Cat2matched=cat2[IndCatMinDist.astype(int),:]
#print('cat2 is,',cat2)
# Cat2matched est un tableau, de la longueur de cat1 avec:
# -la ligne 0 de cat2 si la ligne correspond a un indice de cat1 qui a pas de cross-match
# -s'il y a un cross-matched dans cat2: la ligne de cat2
#print("np.shape(Cat2matched)",np.shape(Cat2matched))
#print("np.shape(IsN)",np.shape(IsN))
Cat2matched[IsN,:]=np.nan #
#print('Cat2matched is', Cat2matched)
if Debug==True:
if i in [Nh1//1000,Nh1//200,Nh1 // 100, Nh1 //10, Nh1 //4, Nh1 //3, Nh1 // 2, Nh1 // 1.5]:
print('******** i={0} ********'.format(i))
print('I am saving Cat2matched')
np.savetxt(output+'Cat2matched_{0}_4debug.txt'.format(i),Cat2matched) #pas ok
pdb.set_trace()
#print('Cat2matched at the index of IndCatMinDist is',Cat2matched[IndCatMinDist!=1])
#print('IndCatMinDist', IndCatMinDist)
#pdb.set_trace()
#print('Cat2matched is', Cat2matched)
# un tableau, avec le meme nombre de lignes que cat1 et le nombre de colomnes de cat2 avec:
# -NaN si cette ligne de cat1 a pas de cross-match
# -s'il y a un cross-matched dans cat2: la ligne de cat2 correspondant a l objet le plus proche
# print("np.shape(Cat2matched)",np.shape(Cat2matched))
#print('cat1',cat1)
#print('Cat2matched is',Cat2matched)#ok avec matlab
#pdb.set_trace()
#print('np.shape(Cat2matched is)',np.shape(Cat2matched)) #ok avec matlab
#from here it is added by me
#create a numpy array with: columns of cat1, columns of Cat2matched
#print('let us just make sure that Cat1 and Cat2matched have same number of lines.')ok
#print('np.shape(Cat1) is',np.shape(Cat1))
#print('np.shape(Cat2matched) is', np.shape(Cat2matched))
#if save_results==True:
# if os.path.exists(output):
# print('the output directory, ' + output+ ' exists already')
# else:
# os.mkdir(output)
# if os.path.exists(output+'/trixel_'+str(index_cat1)+'_'+Catname1):
# print('the output directory, ' + output+'/trixel_'+str(index_cat1)+'_'+Catname1 + ' exists already')
# else:
# os.mkdir(output+'/trixel_'+str(index_cat1)+'_'+Catname1)
if save_results==True:
cross_matching_result_w_nans=np.hstack((Cat1,Cat2matched))
#cross_matching_result_intermediate = np.empty((1,np.shape(Cat1)[1]+np.shape(cat2)[1]))
cross_matching_result_intermediate = np.zeros((1, np.shape(Cat1)[1] + np.shape(cat2)[1]))
for i,j in enumerate(cross_matching_result_w_nans[:,0]): #for all lines,remove the lines where no cross-matched object
if np.all(np.isnan(cross_matching_result_w_nans[i, np.shape(Cat1)[1]:])) == False:
if Verbose==True:
print('At line {0} of Cat1, there is a cross-matched object in cat2'.format(i))
#print('Cat2matched[i,:] is',Cat2matched[i,:])
#pdb.set_trace()
if np.shape(cross_matching_result_intermediate)[0]<2:
#print('np.shape(cross_matching_result_intermediate)[0] is',np.shape(cross_matching_result_intermediate)[0])
cross_matching_result_intermediate=cross_matching_result_w_nans[i,:]
cross_matching_result_intermediate_cat1 = cross_matching_result_w_nans[i, :np.shape(Cat1)[1]]
cross_matching_result_intermediate_cat2 = cross_matching_result_w_nans[i,np.shape(Cat1)[1]:np.shape(Cat1)[1]+np.shape(Cat2matched)[1]]
else:
#print('else')
cross_matching_result_intermediate=np.vstack((cross_matching_result_intermediate,cross_matching_result_w_nans[i,:]))
cross_matching_result_intermediate_cat1 = cross_matching_result_intermediate[:, :np.shape(Cat1)[1]]
cross_matching_result_intermediate_cat2 = cross_matching_result_intermediate[:,np.shape(Cat1)[1]:np.shape(Cat1)[1]+np.shape(Cat2matched)[1]]
#else:
#print('there are no counterparts in cat2')
all_zeros = not np.any(cross_matching_result_intermediate)
if all_zeros==True:
print('There are no counterpart at all in cat 2 for this tri1xel')
#pdb.set_trace()
else:
#print('the shape of cross_matching_result_intermediate_cat1 is',np.shape(cross_matching_result_intermediate_cat1))
#print('the shape of cross_matching_result_intermediate_cat2 is',
# np.shape(cross_matching_result_intermediate_cat2))
#print('the shape of cross_matching_result_intermediate is',
# np.shape(cross_matching_result_intermediate))
#print('ndim of cross_matching_result_intermediate_cat1) is 1?',
# cross_matching_result_intermediate_cat1.ndim)
#print('the len of cross_matching_result_intermediate_cat1 is',np.shape(cross_matching_result_intermediate_cat1)[0])
#print('the len of cross_matching_result_intermediate_cat2 is',np.shape(cross_matching_result_intermediate_cat2)[0])
#print('the len of cross_matching_result_intermediate is',np.shape(cross_matching_result_intermediate)[0])
#if np.shape(cross_matching_result_intermediate_cat1)[0]!=np.shape(cross_matching_result_intermediate_cat2)[0]:
# print('ndim of cross_matching_result_intermediate_cat1) is 1?',cross_matching_result_intermediate_cat1.ndim)
# print('the shapes are not the same, probleme!')
# print(cross_matching_result_intermediate_cat1)
# print(cross_matching_result_intermediate_cat2)
# print('np.shape(cross_matching_result_intermediate)[0] is',np.shape(cross_matching_result_intermediate)[0])
# print('cross_matching_result_intermediate is',cross_matching_result_intermediate)
# print('np.shape(cross_matching_result_w_nans[i,:]))',np.shape(cross_matching_result_w_nans[i,:]))
# pdb.set_trace()
#if Verbose is True:
print('The entries from catalog_1 ({0}) :{1}, cross-matched in catalog_2 ({2}) are {3}'.format(Catname1,cross_matching_result_intermediate_cat1,Catname2,cross_matching_result_intermediate_cat2))
pdb.set_trace()
#print('cross_matching_result is',cross_matching_result)
#print('Is the cross_matching_result the size of Ind?')#yes
#print(np.shape(cross_matching_result))
#print(len(Ind))
#print('Is the number of columns of cross_matching_result the sum of the number of columns of cat1 and cat2?')#yes
#print(np.shape(cross_matching_result))
#print(np.shape(Cat1))
#print(np.shape(cat2))
"""
if (~isempty(InPar.QueryFun))
% execute InPar.QueryFun
% QueryFun can select specific sources (by some
% attributes) from the matched Cat1 and Cat2
FlagSelected = InPar.QueryFun(Cat1,Cat2matched,InPar.QueryFunPar{:});
% what to do with FlagSelected?
Cat1 = Cat1(FlagSelected,:);
Cat2matched = Cat2matched(FlagSelected,:);
end
if (~isempty(InPar.SaveFun))
% execute InPar.SaveFun
% Fun(Cat1,Cat2matched)
InPar.SaveFun(Cat1,Cat2matched,InPar.SaveFunPar{:});
end
"""
#print('np.shape(cross_matching_result_intermediate) is ',np.shape(cross_matching_result_intermediate))
#print(
#'np.shape(cross_matching_result_intermediate_cat1) is ', np.shape(cross_matching_result_intermediate_cat1))
#print(
#'np.shape(cross_matching_result_intermediate_cat2) is ', np.shape(cross_matching_result_intermediate_cat2))
#if np.shape(cross_matching_result_intermediate_cat1)[0]!=np.shape(cross_matching_result_intermediate)[0]:
# print('pb!')
# print('cross_matching_result_intermediate is',cross_matching_result_intermediate)
# print('cross_matching_result_intermediate_cat1 is',cross_matching_result_intermediate_cat1)
# print('cross_matching_result_intermediate_cat2 is', cross_matching_result_intermediate_cat2)
# pdb.set_trace()
if save_in_one_file==True:
if os.path.exists(output +'/cross-matching_result_full.txt')==False:
with open(output +'/cross-matching_result_full.txt', 'ab') as f:
if cross_matching_result_intermediate.ndim>1:
np.savetxt(f, cross_matching_result_intermediate, delimiter=",",header=header1+','+header2)
else:
np.savetxt(f, cross_matching_result_intermediate[None], delimiter=',',header=header1+','+header2)
else:
with open(output +'/cross-matching_result_full.txt', 'ab') as f:
if cross_matching_result_intermediate.ndim > 1:
np.savetxt(f, cross_matching_result_intermediate, delimiter=",")
else:
np.savetxt(f, cross_matching_result_intermediate[None], delimiter=",")
if save_in_separate_files==True:
if os.path.exists(output +'/cross-matching_result_{0}.txt'.format(Catname1))==False:
with open(output +'/cross-matching_result_{0}.txt'.format(Catname1), 'ab') as f:
if cross_matching_result_intermediate_cat1.ndim>1:
np.savetxt(f, cross_matching_result_intermediate_cat1, delimiter=",",header=header1)
else:
np.savetxt(f, cross_matching_result_intermediate_cat1[None], delimiter=",",
header=header1)
else:
with open(output + '/cross-matching_result_{0}.txt'.format(Catname1), 'ab') as f:
if cross_matching_result_intermediate_cat1.ndim>1:
np.savetxt(f, cross_matching_result_intermediate_cat1,
delimiter=",")
else:
np.savetxt(f, cross_matching_result_intermediate_cat1[None],
delimiter=",")
if os.path.exists(output + '/cross-matching_result_{0}.txt'.format(Catname2)) == False:
with open(output + '/cross-matching_result_{0}.txt'.format(Catname2), 'ab') as f:
if cross_matching_result_intermediate_cat2.ndim>1:
np.savetxt(f, cross_matching_result_intermediate_cat2,
delimiter=",",header=header2)
else:
np.savetxt(f, cross_matching_result_intermediate_cat2[None],
delimiter=",", header=header2)
else:
with open(output + '/cross-matching_result_{0}.txt'.format(Catname2), 'ab') as f:
if cross_matching_result_intermediate_cat2.ndim>1:
np.savetxt(f, cross_matching_result_intermediate_cat2,
delimiter=",")
else:
np.savetxt(f, cross_matching_result_intermediate_cat2[None],
delimiter=",")
#time checker:
#ongoing7 = time.time()
#print(ongoing7 - ongoing6)
#print(ongoing6 - ongoing5)#bcp
#print(ongoing5 - ongoing4)
#print(ongoing4-ongoing3)
#print(ongoing3-ongoing2)#bcp
#print(ongoing2-ongoing1)
#print(ongoing1-start)
#print(ongoing7-start)
#pdb.set_trace()
else:
print('None of the trixels of catalog_2 ({0}) overlapping with trixel #{1} of catalog_1 ({2}) has sources in it'.format(Catname2,index_cat1,Catname1))
#pdb.set_trace()
else:
if Verbose == True:
print('trixel #{0} of Catalog_1 ({1}) is empty'.format(index_cat1,Catname1))
if time_it==True:
ongoing7 = time.time()
print('it took {0} seconds for the process to run'.format(ongoing7 - start))
def read_ztf_HDF_matched(FieldID, Lines, ColCell=None, path=None):
    """
    Description: Read ZTF matched light curves from local HDF5 light curve files.
                 The HDF5 files are distributed as part of the catsHTM catalogs.
    Input  : - ZTF field number.
             - [start end] lines to read. The lines for a given source are
               available in I1 and I2 in the 'ztfSrcLCDR1' catsHTM catalog.
             - ColCell: column names for the catalog.
               Default is ['HMJD','Mag','MagErr','ColorCoef','Flags'].
             - path to the data directory. Default is "."
    Output : - Catalog (the requested line range)
             - ColCell
    By : Maayane Soumagnac. Translated from Eran O. Ofek's matlab routine with the same name
    URL : https://github.com/maayane/catsHTM; http://weizmann.ac.il/home/eofek/matlab/
    Example: Cat,ColCel=catsHTM.read_ztf_HDF_matched(815,[10,25],ColCell=None,path=path)
    """
    if ColCell is None:
        ColCell = np.array(['HMJD', 'Mag', 'MagErr', 'ColorCoef', 'Flags'])
    if path is None:
        path = '.'
    # Field number is zero-padded to six digits, e.g. 815 -> 'ztfLCDR1_000815.hdf5'.
    file_name = 'ztfLCDR1_' + '{0:06}'.format(FieldID) + '.hdf5'
    # NOTE(review): the whole '/AllLC' dataset is loaded and sliced in memory;
    # a partial (Offset-based) read was apparently considered but not used.
    raw = class_HDF5.HDF5(path + '/' + file_name).load(dataset_name='/AllLC', numpy_array=True)
    # The dataset is stored transposed relative to the catalog layout.
    full_cat = raw.T
    # Lines are 1-based (matlab convention), hence the -1 on the start index;
    # the slice keeps rows Lines[0]..Lines[1] inclusive.
    cat_cut = full_cat[Lines[0] - 1:Lines[1], :]
    return cat_cut, ColCell
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaebusiness.business import Command, CommandSequential, CommandParallel
from gaebusiness.gaeutil import SaveCommand, ModelSearchCommand, SingleModelSearchCommand
from gaeforms.ndb.form import ModelForm
from gaegraph.business_base import UpdateNode, CreateSingleArc, SingleDestinationSearch, NodeSearch, SingleOriginSearch
from slugify import slugify
from gaeslug.model import Slug, ToSlug
from webapp2_extras.i18n import gettext as _
def _slugfy(text):
    """Normalize ``text`` into a slug, capped at 500 chars (db column limit)."""
    normalized = slugify(text, max_length=500, word_boundary=True)
    return normalized
class SlugForm(ModelForm):
    """
    Form used for save and update operations.

    Only the ``name`` property is exposed; it receives the generated slug
    string (see NaiveSaveSlugCommand / NaiveUpdateSlugCommand).
    """
    _model_class = Slug
    _include = [Slug.name]
class SlugFormDetail(ModelForm):
    """
    Form used to show entity details (``creation`` and ``name`` properties).
    """
    _model_class = Slug
    _include = [Slug.creation,
                Slug.name]
class SlugFormShort(ModelForm):
    """
    Form used to show the entity's short version, mainly for tables
    (currently the same fields as SlugFormDetail).
    """
    _model_class = Slug
    _include = [Slug.creation,
                Slug.name]
class Tip(Command):
    """
    Command to return a valid (unused) slug tip based on a string.

    Candidates are tried in order: the plain slug of ``text``, then
    ``text-2``, ``text-3``, ... The first candidate with no existing Slug
    entity becomes ``self.result``. If every candidate up to
    ``max_attempts`` is taken, a 'name' error is added instead.
    """

    def __init__(self, text, max_attempts=20):
        super(Tip, self).__init__()
        self.max_attempts = max_attempts
        self.text = text

    def do_business(self):
        cmd = FindSlug(self.text)
        for i in xrange(2, self.max_attempts + 1):
            model = cmd()
            if model is None:
                self.result = cmd.slug
                return
            cmd = FindSlug(self.text + ('-%s' % i))
        # Bug fix: the candidate built on the loop's last iteration
        # ('<text>-<max_attempts>') was constructed but never executed, so
        # only max_attempts-1 candidates were actually checked. Try it too
        # before giving up.
        if cmd() is None:
            self.result = cmd.slug
            return
        self.add_error('name', _('It was impossible generate a slug'))
class FindSlug(SingleModelSearchCommand):
    """
    Search command returning the Slug entity matching a (slugified) name.
    """

    def __init__(self, slug):
        # Normalize first and keep the value on self.slug so callers
        # (e.g. Tip) can read the candidate even when no entity is found.
        self.slug = _slugfy(slug)
        super(FindSlug, self).__init__(Slug.query_by_slug(self.slug))
class FindSlugBySlugNameOrId(CommandParallel):
    """
    Find a Slug node either by node id or by slug name.

    ``NodeSearch`` raises when ``slug_or_id`` cannot be interpreted as a
    node id; in that case the value is treated as a slug name instead.
    """

    def __init__(self, slug_or_id):
        try:
            cmd = NodeSearch(slug_or_id)
        except Exception:
            # Bug fix: a bare 'except:' would also swallow
            # KeyboardInterrupt/SystemExit; only ordinary errors should
            # trigger the slug-name fallback.
            cmd = FindSlug(slug_or_id)
        super(FindSlugBySlugNameOrId, self).__init__(cmd)
class FindSlugOrigin(SingleOriginSearch):
    """Find the origin node connected to a Slug through a ToSlug arc."""
    arc_class = ToSlug
class FindObjBySlug(FindSlug):
    """
    Find the object a slug points to: locate the Slug entity by name,
    then follow its ToSlug arc back to the origin node.
    """

    def do_business(self, stop_on_error=True):
        super(FindObjBySlug, self).do_business(stop_on_error)
        if self.result:
            # Replace the Slug entity with the node the arc originates from.
            cmd = FindSlugOrigin(self.result)
            self.result = cmd()
class NaiveSaveSlugCommand(SaveCommand):
    """
    Save a Slug entity, taking its name from the previous command's result.
    """
    _model_form_class = SlugForm

    def handle_previous(self, command):
        # The previous command in the sequence (a Tip) produced a free slug
        # string; use it as the entity name.
        self.form.name = command.result
class CreateToSlug(CreateSingleArc):
    """Create the ToSlug arc linking an origin node to its Slug."""
    arc_class = ToSlug
class SaveSlugCommand(CommandSequential):
    """
    Create a Slug for ``origin``: Tip first finds a free slug for ``name``,
    then the Slug entity is saved and linked to ``origin`` via a ToSlug arc.
    """

    def __init__(self, name, origin):
        super(SaveSlugCommand, self).__init__(
            Tip(name),
            CreateToSlug(origin, NaiveSaveSlugCommand()))
class FindSlugFromObject(SingleDestinationSearch):
    """Find the Slug node an object points to through its ToSlug arc."""
    arc_class = ToSlug
class FindSlugStringFromObject(FindSlugFromObject):
    """
    Like FindSlugFromObject, but yields the slug's name string instead of
    the Slug entity; falsy results are passed through unchanged.
    """

    def do_business(self):
        super(FindSlugStringFromObject, self).do_business()
        if self.result:
            self.result = self.result.name
class FindObjBySlugOrIdUnsecure(CommandParallel):
    """
    Find an object either by node id or by slug name, with no check on the
    class of the returned object (see FindObjBySlugOrId for the checked
    variant).
    """

    def __init__(self, slug_or_id):
        try:
            cmd = NodeSearch(slug_or_id)
        except Exception:
            # Bug fix: a bare 'except:' would also swallow
            # KeyboardInterrupt/SystemExit; only ordinary errors should
            # trigger the slug-name fallback.
            cmd = FindObjBySlug(slug_or_id)
        super(FindObjBySlugOrIdUnsecure, self).__init__(cmd)
class FindObjBySlugOrId(FindObjBySlugOrIdUnsecure):
    """
    Same lookup as FindObjBySlugOrIdUnsecure, plus a class check: a result
    that is not an instance of ``expected_obj_class`` is rejected with a
    'security' error.
    """

    def __init__(self, slug_or_id, expected_obj_class):
        super(FindObjBySlugOrId, self).__init__(slug_or_id)
        self.expected_obj_class = expected_obj_class

    def do_business(self):
        super(FindObjBySlugOrId, self).do_business()
        found = self.result
        if not found:
            return
        if isinstance(found, self.expected_obj_class):
            return
        error_msg = 'someone trying to reach object %s, but expected class is %s' % (
            found, self.expected_obj_class)
        self.add_error('security', error_msg)
class NaiveUpdateSlugCommand(UpdateNode):
    """
    Update a Slug node's name, taking the value from the previous
    command's result.
    """
    _model_form_class = SlugForm

    def handle_previous(self, command):
        # The previous command in the sequence (a Tip) produced a free slug
        # string; use it as the new name.
        self.form.name = command.result
class UpdateSlugCommand(CommandSequential):
    """
    Update an existing Slug node: Tip computes a free slug for ``name``,
    then NaiveUpdateSlugCommand writes it to the node at ``model_key``.
    """

    def __init__(self, model_key, name, **form_parameters):
        tip = Tip(name)
        update = NaiveUpdateSlugCommand(model_key, **form_parameters)
        super(UpdateSlugCommand, self).__init__(tip, update)
class ListSlugCommand(ModelSearchCommand):
    """
    Paginated listing of Slug entities ordered by creation.

    Pagination and caching parameters are forwarded unchanged to
    ModelSearchCommand.
    """

    def __init__(self, page_size=100, start_cursor=None, offset=0, use_cache=True, cache_begin=True, **kwargs):
        super(ListSlugCommand, self).__init__(Slug.query_by_creation(), page_size, start_cursor, offset, use_cache,
                                              cache_begin, **kwargs)
|
from struct import pack, unpack
def bit_bool(value):
    """Serialize a truthy/falsy value into a single struct boolean byte."""
    packed = pack('?', value)
    return packed
def to_bool(value):
    """Deserialize a single boolean byte back into a Python bool."""
    (flag,) = unpack('?', value)
    return flag
def sint16(value):
    """Serialize a signed integer with struct format 'i'.

    NOTE(review): despite the name, format 'i' is a native *4-byte* signed
    int in native byte order, not 16 bits. Confirm whether 'h' was
    intended; changing it now would break existing serialized data and the
    to_int() counterpart, so the behavior is preserved here.
    """
    packed = pack('i', value)
    return packed
def to_int(value):
    """Deserialize bytes produced with struct format 'i' (native signed int)."""
    (number,) = unpack('i', value)
    return number
|