import Field from './Field';
// Polyfill: Array.isArray for pre-ES5 engines, using the
// Object#toString brand check ('[object Array]').
if (!Array.isArray) {
  // @ts-ignore -- assigning onto the built-in Array constructor
  Array.isArray = (arg: any) => Object.prototype.toString.call(arg) === '[object Array]';
}
// Polyfill: Object.assign for pre-ES2015 engines (the MDN reference
// implementation). Copies own enumerable properties of each source onto
// the target and returns the target.
if (typeof Object.assign != 'function') {
  // Must be writable: true, enumerable: false, configurable: true
  Object.defineProperty(Object, "assign", {
    value: function assign(target: any, varArgs: any) { // .length of function is 2
      'use strict';
      if (target == null) { // TypeError if undefined or null
        throw new TypeError('Cannot convert undefined or null to object');
      }
      // Box primitives so properties can be attached to the result.
      let to = Object(target);
      for (let index = 1; index < arguments.length; index++) {
        const nextSource = arguments[index];
        if (nextSource != null) { // Skip over if undefined or null
          for (let nextKey in nextSource) {
            // Avoid bugs when hasOwnProperty is shadowed
            if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) {
              to[nextKey] = nextSource[nextKey];
            }
          }
        }
      }
      return to;
    },
    writable: true,
    configurable: true
  });
}
const hasOwnProperty = Object.prototype.hasOwnProperty;
const symToStringTag = typeof Symbol !== 'undefined' ? Symbol.toStringTag : undefined;

/**
 * Returns the `Object.prototype.toString` tag of `value`
 * (e.g. '[object Array]'), temporarily unmasking any `Symbol.toStringTag`
 * override so the built-in classification is reported (lodash-style).
 * @param value any value
 * @returns the '[object Xxx]' tag string
 */
function baseGetTag (value: any) {
  // FIX: was `value === null`, which made the '[object Undefined]' branch
  // unreachable and sent `undefined` down the symbol path below.
  if (value == null) {
    return value === undefined ? '[object Undefined]' : '[object Null]';
  }
  if (!(symToStringTag && symToStringTag in Object(value))) {
    // FIX: was a bare `toString.call(value)` relying on the global object's
    // inherited toString; call Object.prototype.toString explicitly.
    return Object.prototype.toString.call(value)
  }
  const isOwn = hasOwnProperty.call(value, symToStringTag);
  const tag = value[symToStringTag];
  let unmasked = false;
  try {
    // Hide the custom tag so toString reports the built-in classification.
    value[symToStringTag] = undefined;
    unmasked = true
  } catch (e) {
    // Frozen / non-writable objects: fall through with the mask in place.
  }
  const result = Object.prototype.toString.call(value);
  if (unmasked) {
    // Restore the caller-visible state exactly as it was.
    if (isOwn) {
      value[symToStringTag] = tag;
    } else {
      delete value[symToStringTag];
    }
  }
  return result;
}
/**
 * Lower-cased `Object.prototype.toString` class name of a value,
 * e.g. 'array', 'string', 'null', 'undefined'.
 * @param value value to classify
 */
export function TypeOf(value: any) {
  const brand = Object.prototype.toString.call(value); // '[object Xxx]'
  return brand.substring(8, brand.length - 1).toLowerCase();
}
/**
 * Whether `value` is callable: a plain, async or generator function,
 * or a function Proxy.
 * @param value
 * @returns {boolean}
 */
export function isFunction (value: any): boolean {
  if (!isObject(value)) {
    return false
  }
  switch (baseGetTag(value)) {
    case '[object Function]':
    case '[object AsyncFunction]':
    case '[object GeneratorFunction]':
    case '[object Proxy]':
      return true
    default:
      return false
  }
}
/**
 * Whether `value` is the language sense of an object: any non-null value
 * whose typeof is 'object' or 'function'.
 * @param value
 * @returns {boolean}
 */
export function isObject(value: any) {
  if (value === null) {
    return false
  }
  const kind = typeof value
  return kind === 'object' || kind === 'function'
}
/**
 * Whether `val` is a Date instance (brand-checked, so it also works
 * across realms).
 * @param val
 * @returns {boolean}
 */
export function isDate (val: any) {
  const DATE_TAG = '[object Date]';
  return Object.prototype.toString.call(val) === DATE_TAG;
}
/**
 * Whether `val` is an ArrayBuffer (brand-checked via Object#toString).
 * @param val
 * @returns {boolean}
 */
export function isArrayBuffer(val: any) {
  const BUFFER_TAG = '[object ArrayBuffer]';
  return Object.prototype.toString.call(val) === BUFFER_TAG;
}
/**
 * Whether `value` is a string: either a primitive string or a boxed
 * String object. null/undefined return false.
 * @param value
 * @returns {boolean}
 */
export function isString (value: any): boolean {
  if (value === null || value === undefined) {
    return false;
  }
  if (typeof value === 'string') {
    return true;
  }
  // Boxed strings (new String(...)) count too.
  return value.constructor !== null && value.constructor === String;
}
/**
 * Whether `value` is a real number: Number-branded (primitive or boxed)
 * and not NaN.
 * @param value
 * @returns {boolean}
 */
export function isNumber (value: any) {
  const hasNumberBrand = Object.prototype.toString.call(value) === '[object Number]';
  return hasNumberBrand && !isNaN(value);
}
/**
 * Whether `object` has no enumerable keys at all. Note: like the classic
 * for..in idiom, inherited enumerable keys also count as non-empty.
 * @param object
 * @returns {boolean}
 */
export function isEmpty (object: {}) {
  for (const key in object) {
    // A single enumerable key (own or inherited) means non-empty.
    return false;
  }
  return true;
}
/**
 * Whether `obj` is nullish (null or undefined).
 * @param obj
 * @returns {boolean}
 */
export function isNull (obj: any) {
  return obj === null || obj === undefined;
}
/**
 * Native array check; delegates to Array.isArray (polyfilled above for
 * very old engines).
 * @param arr candidate value
 */
export function isArray(arr: any): boolean {
  const nativeIsArray = Array.isArray;
  return nativeIsArray(arr);
}
/**
 * Mutating merge: copies own enumerable properties from each source onto
 * `target` (later sources win) and returns `target`.
 * @param target object that receives the properties
 * @param sources any number of source objects
 */
export function assign(target: object, ...sources: any[]) {
  return sources.reduce((acc, source) => Object.assign(acc, source), target);
}
/**
 * Emits a namespaced console warning (prefixed with 'wind-layer: ').
 * @param msg message body
 */
export function warnLog(msg: string) {
  const prefixed = `wind-layer: ${msg}`;
  console.warn(prefixed);
}
/**
 * Get floored division
 * @param a
 * @param n
 * @returns {Number} returns remainder of floored division, i.e.
 * a mod n with the sign of n. Unlike the `%` operator this is consistent
 * for negative `a` (e.g. floorMod(-1, 360) === 359).
 * See http://en.wikipedia.org/wiki/Modulo_operation.
 */
export function floorMod (a: number, n: number) {
  const quotient = Math.floor(a / n);
  return a - n * quotient;
}
/**
 * Whether a value is usable as a number: neither nullish nor NaN
 * (after numeric coercion, as with global isNaN).
 * @param val
 * @returns {boolean}
 */
export function isValide(val: any) {
  if (val === undefined || val === null) {
    return false;
  }
  return !isNaN(val);
}
/**
 * One record of a GFS (grib-json style) dataset: grid metadata plus the
 * flat value array for a single parameter/component. `formatData` below
 * pairs a U record with a V record by category/number.
 */
export interface IGFSItem {
  header: {
    parameterCategory: number | string; // grib category ("1"/"2" used for wind below)
    parameterNumber: number | string; // grib number ("2" = U, "3" = V below)
    dx: number; // longitude increment between columns
    dy: number; // latitude increment between rows
    nx: number; // number of columns
    ny: number; // number of rows
    lo1: number; // first longitude (grid x origin)
    lo2: number; // last longitude
    la1: number; // first latitude (grid y origin)
    la2: number; // last latitude
    [key: string]: any; // other grib header fields pass through untouched
  };
  data: number[]; // grid values for this component
}
/**
 * Convert GFS grib-json records into a `Field` vector grid.
 * Records are matched by their "parameterCategory,parameterNumber" pair:
 * "1,2" / "2,2" is the U component, "1,3" / "2,3" the V component.
 * Returns undefined when either component is missing.
 * @param data parsed GFS records
 */
export function formatData(data: IGFSItem[]) {
  const isDev = (process.env.NODE_ENV as string) === ('development' as string);
  if (isDev) {
    console.time('format-data');
  }
  let uComp: IGFSItem | undefined;
  let vComp: IGFSItem | undefined;
  data.forEach((record: IGFSItem) => {
    const key = record.header.parameterCategory + "," + record.header.parameterNumber;
    if (key === "1,2" || key === "2,2") {
      uComp = record;
    } else if (key === "1,3" || key === "2,3") {
      vComp = record;
    }
  });
  // Bail out when either wind component is absent.
  if (!vComp || !uComp) return;
  const header = uComp.header;
  const vectorField = new Field({
    xmin: header.lo1, // grid extent: gridded data covers a rectangle of lon/lat
    ymin: header.la1,
    xmax: header.lo2,
    ymax: header.la2,
    deltaX: header.dx, // x (longitude) increment
    deltaY: header.dy, // y (latitude) increment
    cols: header.nx, // columns (equals (xmax - xmin) / deltaX)
    rows: header.ny, // rows
    us: uComp.data, // U component
    vs: vComp.data, // V component
    // wrappedX: false,
  });
  if (isDev) {
    console.timeEnd('format-data');
  }
  return vectorField;
}
/**
 * create canvas
 * @param width logical width in CSS pixels
 * @param height logical height in CSS pixels
 * @param retina device-pixel-ratio multiplier applied to both dimensions
 * @param Canvas node.js canvas implementation; its constructor must accept
 * (width, height). Only used when no DOM `document` exists.
 * @returns {HTMLCanvasElement}
 */
export function createCanvas(width: number, height: number, retina: number, Canvas?: any): HTMLCanvasElement {
  const pixelWidth = width * retina;
  const pixelHeight = height * retina;
  if (typeof document === 'undefined') {
    // create a new canvas instance in node.js via the injected class
    return new Canvas(pixelWidth, pixelHeight);
  }
  const canvas = document.createElement('canvas');
  canvas.width = pixelWidth;
  canvas.height = pixelHeight;
  return canvas;
}
/**
 * Detach a DOM node from its parent (when attached) and return it.
 * @param node element to remove; nullish input yields null
 * @returns {removeDomNode} the detached node, or null
 */
export function removeDomNode(node: HTMLElement | HTMLCanvasElement) {
  if (!node) {
    return null;
  }
  const parent = node.parentNode;
  if (parent) {
    parent.removeChild(node);
  }
  return node;
}
const keyword = /(\D+)/;
const hex = /^#([a-f0-9]{6})([a-f0-9]{2})?$/i;
// eslint-disable-next-line no-useless-escape
const rgba = /^rgba?\(\s*([+-]?\d+)\s*,\s*([+-]?\d+)\s*,\s*([+-]?\d+)\s*(?:,\s*([+-]?[\d\.]+)\s*)?\)$/;
const colorNames: {
[key: string]: number[]
} = {
'aliceblue': [240, 248, 255],
'antiquewhite': [250, 235, 215],
'aqua': [0, 255, 255],
'aquamarine': [127, 255, 212],
'azure': [240, 255, 255],
'beige': [245, 245, 220],
'bisque': [255, 228, 196],
'black': [0, 0, 0],
'blanchedalmond': [255, 235, 205],
'blue': [0, 0, 255],
'blueviolet': [138, 43, 226],
'brown': [165, 42, 42],
'burlywood': [222, 184, 135],
'cadetblue': [95, 158, 160],
'chartreuse': [127, 255, 0],
'chocolate': [210, 105, 30],
'coral': [255, 127, 80],
'cornflowerblue': [100, 149, 237],
'cornsilk': [255, 248, 220],
'crimson': [220, 20, 60],
'cyan': [0, 255, 255],
'darkblue': [0, 0, 139],
'darkcyan': [0, 139, 139],
'darkgoldenrod': [184, 134, 11],
'darkgray': [169, 169, 169],
'darkgreen': [0, 100, 0],
'darkgrey': [169, 169, 169],
'darkkhaki': [189, 183, 107],
'darkmagenta': [139, 0, 139],
'darkolivegreen': [85, 107, 47],
'darkorange': [255, 140, 0],
'darkorchid': [153, 50, 204],
'darkred': [139, 0, 0],
'darksalmon': [233, 150, 122],
'darkseagreen': [143, 188, 143],
'darkslateblue': [72, 61, 139],
'darkslategray': [47, 79, 79],
'darkslategrey': [47, 79, 79],
'darkturquoise': [0, 206, 209],
'darkviolet': [148, 0, 211],
'deeppink': [255, 20, 147],
'deepskyblue': [0, 191, 255],
'dimgray': [105, 105, 105],
'dimgrey': [105, 105, 105],
'dodgerblue': [30, 144, 255],
'firebrick': [178, 34, 34],
'floralwhite': [255, 250, 240],
'forestgreen': [34, 139, 34],
'fuchsia': [255, 0, 255],
'gainsboro': [220, 220, 220],
'ghostwhite': [248, 248, 255],
'gold': [255, 215, 0],
'goldenrod': [218, 165, 32],
'gray': [128, 128, 128],
'green': [0, 128, 0],
'greenyellow': [173, 255, 47],
'grey': [128, 128, 128],
'honeydew': [240, 255, 240],
'hotpink': [255, 105, 180],
'indianred': [205, 92, 92],
'indigo': [75, 0, 130],
'ivory': [255, 255, 240],
'khaki': [240, 230, 140],
'lavender': [230, 230, 250],
'lavenderblush': [255, 240, 245],
'lawngreen': [124, 252, 0],
'lemonchiffon': [255, 250, 205],
'lightblue': [173, 216, 230],
'lightcoral': [240, 128, 128],
'lightcyan': [224, 255, 255],
'lightgoldenrodyellow': [250, 250, 210],
'lightgray': [211, 211, 211],
'lightgreen': [144, 238, 144],
'lightgrey': [211, 211, 211],
'lightpink': [255, 182, 193],
'lightsalmon': [255, 160, 122],
'lightseagreen': [32, 178, 170],
'lightskyblue': [135, 206, 250],
'lightslategray': [119, 136, 153],
'lightslategrey': [119, 136, 153],
'lightsteelblue': [176, 196, 222],
'lightyellow': [255, 255, 224],
'lime': [0, 255, 0],
'limegreen': [50, 205, 50],
'linen': [250, 240, 230],
'magenta': [255, 0, 255],
'maroon': [128, 0, 0],
'mediumaquamarine': [102, 205, 170],
'mediumblue': [0, 0, 205],
'mediumorchid': [186, 85, 211],
'mediumpurple': [147, 112, 219],
'mediumseagreen': [60, 179, 113],
'mediumslateblue': [123, 104, 238],
'mediumspringgreen': [0, 250, 154],
'mediumturquoise': [72, 209, 204],
'mediumvioletred': [199, 21, 133],
'midnightblue': [25, 25, 112],
'mintcream': [245, 255, 250],
'mistyrose': [255, 228, 225],
'moccasin': [255, 228, 181],
'navajowhite': [255, 222, 173],
'navy': [0, 0, 128],
'oldlace': [253, 245, 230],
'olive': [128, 128, 0],
'olivedrab': [107, 142, 35],
'orange': [255, 165, 0],
'orangered': [255, 69, 0],
'orchid': [218, 112, 214],
'palegoldenrod': [238, 232, 170],
'palegreen': [152, 251, 152],
'paleturquoise': [175, 238, 238],
'palevioletred': [219, 112, 147],
'papayawhip': [255, 239, 213],
'peachpuff': [255, 218, 185],
'peru': [205, 133, 63],
'pink': [255, 192, 203],
'plum': [221, 160, 221],
'powderblue': [176, 224, 230],
'purple': [128, 0, 128],
'rebeccapurple': [102, 51, 153],
'red': [255, 0, 0],
'rosybrown': [188, 143, 143],
'royalblue': [65, 105, 225],
'saddlebrown': [139, 69, 19],
'salmon': [250, 128, 114],
'sandybrown': [244, 164, 96],
'seagreen': [46, 139, 87],
'seashell': [255, 245, 238],
'sienna': [160, 82, 45],
'silver': [192, 192, 192],
'skyblue': [135, 206, 235],
'slateblue': [106, 90, 205],
'slategray': [112, 128, 144],
'slategrey': [112, 128, 144],
'snow': [255, 250, 250],
'springgreen': [0, 255, 127],
'steelblue': [70, 130, 180],
'tan': [210, 180, 140],
'teal': [0, 128, 128],
'thistle': [216, 191, 216],
'tomato': [255, 99, 71],
'turquoise': [64, 224, 208],
'violet': [238, 130, 238],
'wheat': [245, 222, 179],
'white': [255, 255, 255],
'whitesmoke': [245, 245, 245],
'yellow': [255, 255, 0],
'yellowgreen': [154, 205, 50]
};
export function getColor(string: string) {
let rgb = [];
if (string.match(hex)) {
let match = string.match(hex);
if (match !== null) {
// @ts-ignore
match = match[1];
for (let i = 0; i < 3; i++) {
// https://jsperf.com/slice-vs-substr-vs-substring-methods-long-string/19
const i2 = i * 2;
// @ts-ignore
rgb[i] = parseInt(match.slice(i2, i2 + 2), 16);
}
rgb[3] = 1;
}
} else if (string.match(rgba)) {
const match = string.match(rgba);
for (let i = 0; i < 3; i++) {
// @ts-ignore
rgb[i] = parseInt(match[i + 1], 0);
}
// @ts-ignore
if (match[4]) {
// @ts-ignore
rgb[3] = parseFloat(match[4]);
} else {
rgb[3] = 1;
}
} else if (string.match(keyword)) {
const match = string.match(keyword);
// @ts-ignore
if (match[1] === 'transparent') {
return [0, 0, 0, 0];
}
// @ts-ignore
rgb = colorNames[match[1]];
if (!rgb) {
return null;
}
rgb[3] = 1;
return rgb;
} else {
return null;
}
return rgb;
} | the_stack |
import test from 'japa'
import { ApplicationContract } from '@ioc:Adonis/Core/Application'
import { LucidUser } from '../../src/UserProviders/Lucid/User'
import {
setup,
reset,
cleanup,
getUserModel,
getLucidProvider,
setupApplication,
} from '../../test-helpers'
// Application container shared by all test groups; re-created in each
// group's `before` hook.
let app: ApplicationContract

test.group('Lucid Provider | findById', (group) => {
  // Boot a fresh application and run migrations/setup once per group.
  group.before(async () => {
    app = await setupApplication()
    await setup(app)
  })

  group.after(async () => {
    await cleanup(app)
  })

  // Reset database state between tests so each test starts clean.
  group.afterEach(async () => {
    await reset(app)
  })

  test('find a user using the id', async (assert) => {
    // 5 planned = before hook (1) + after hook (1) + 3 assertions below.
    assert.plan(5)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({ username: 'virk', email: 'virk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async (model) => assert.instanceOf(model, User))
    const providerUser = await lucidProvider.findById(user.id)
    assert.instanceOf(providerUser.user, User)
    assert.equal(providerUser.user!.username, 'virk')
    assert.equal(providerUser.user!.email, 'virk@adonisjs.com')
  })

  test('return null when unable to lookup using id', async (assert) => {
    // 2 planned = before hook (1) + isNull (1). The after hook throws
    // because it must not fire on a lookup miss.
    assert.plan(2)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async () => {
      throw new Error('not expected to be invoked')
    })
    const providerUser = await lucidProvider.findById(1)
    assert.isNull(providerUser.user)
  })

  test('use custom connection', async (assert) => {
    // Row is created on the default connection; a lookup over 'secondary'
    // therefore comes back empty.
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({ username: 'virk', email: 'virk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.setConnection('secondary')
    const providerUser = await lucidProvider.findById(user.id)
    assert.isNull(providerUser.user)
  })

  test('use custom query client', async (assert) => {
    // Same as above, but passing a query-client instance instead of a name.
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({ username: 'virk', email: 'virk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.setConnection(app.container.use('Adonis/Lucid/Database').connection('secondary'))
    const providerUser = await lucidProvider.findById(user.id)
    assert.isNull(providerUser.user)
  })

  test('use custom user builder', async (assert) => {
    // 6 planned = before hook (1) + after hook (1) + 4 assertions below.
    assert.plan(6)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({ username: 'virk', email: 'virk@adonisjs.com' })
    // Custom provider-user wrapper class the provider should instantiate.
    class CustomUser extends LucidUser<typeof User> {}
    const lucidProvider = getLucidProvider(app, {
      model: async () => User,
      user: async () => CustomUser,
    })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async (model) => assert.instanceOf(model, User))
    const providerUser = await lucidProvider.findById(user.id)
    assert.instanceOf(providerUser, CustomUser)
    assert.instanceOf(providerUser.user, User)
    assert.equal(providerUser.user!.username, 'virk')
    assert.equal(providerUser.user!.email, 'virk@adonisjs.com')
  })
})
test.group('Lucid Provider | findByUids', (group) => {
  // Boot a fresh application and run setup once per group.
  group.before(async () => {
    app = await setupApplication()
    await setup(app)
  })

  group.after(async () => {
    await cleanup(app)
  })

  // Reset database state between tests.
  group.afterEach(async () => {
    await reset(app)
  })

  test('find a user using one of the uids', async (assert) => {
    // 9 planned = before hook (2 lookups) + after hook (2) + 5 assertions.
    assert.plan(9)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    await User.create({ username: 'virk', email: 'virk@adonisjs.com' })
    await User.create({ username: 'nikk', email: 'nikk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async (user) => assert.instanceOf(user, User))
    // Lookup by username and by email must both resolve.
    const providerUser = await lucidProvider.findByUid('virk')
    const providerUser1 = await lucidProvider.findByUid('nikk@adonisjs.com')
    assert.instanceOf(providerUser.user, User)
    assert.equal(providerUser.user!.username, 'virk')
    assert.equal(providerUser.user!.email, 'virk@adonisjs.com')
    assert.equal(providerUser1.user!.username, 'nikk')
    assert.equal(providerUser1.user!.email, 'nikk@adonisjs.com')
  })

  test('return null when unable to lookup user using uid', async (assert) => {
    // 4 planned = before hook (2 lookups) + 2 isNull assertions. The after
    // hook throws because it must not fire on misses.
    assert.plan(4)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async () => {
      throw new Error('not expected to be invoked')
    })
    const providerUser = await lucidProvider.findByUid('virk')
    const providerUser1 = await lucidProvider.findByUid('virk@adonisjs.com')
    assert.isNull(providerUser.user)
    assert.isNull(providerUser1.user)
  })

  test('use custom connection', async (assert) => {
    // Row exists on the default connection only, so lookups over
    // 'secondary' must be empty.
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    await User.create({ username: 'nikk', email: 'nikk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.setConnection('secondary')
    const providerUser = await lucidProvider.findByUid('nikk')
    const providerUser1 = await lucidProvider.findByUid('nikk@adonisjs.com')
    assert.isNull(providerUser.user)
    assert.isNull(providerUser1.user)
  })

  test('use custom query client', async (assert) => {
    // Same as above, but passing a query-client instance instead of a name.
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    await User.create({ username: 'nikk', email: 'nikk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.setConnection(app.container.use('Adonis/Lucid/Database').connection('secondary'))
    const providerUser = await lucidProvider.findByUid('nikk')
    const providerUser1 = await lucidProvider.findByUid('nikk@adonisjs.com')
    assert.isNull(providerUser.user)
    assert.isNull(providerUser1.user)
  })

  test('find a user using the custom function', async (assert) => {
    // 4 planned = the 4 assertions below; the hooks do not run because a
    // model-defined `findForAuth` replaces the provider's own lookup.
    assert.plan(4)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    // Custom lookup: match on username only, ignore email.
    User['findForAuth'] = function (_: any, uid: string) {
      return this.query().where('username', uid).first()
    }
    await User.create({ username: 'virk', email: 'virk@adonisjs.com' })
    await User.create({ username: 'nikk', email: 'nikk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    /**
     * These won't be executed
     */
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async (user) => assert.instanceOf(user, User))
    const providerUser = await lucidProvider.findByUid('virk')
    // Email lookup misses because findForAuth only checks username.
    const providerUser1 = await lucidProvider.findByUid('nikk@adonisjs.com')
    assert.instanceOf(providerUser.user, User)
    assert.equal(providerUser.user!.username, 'virk')
    assert.equal(providerUser.user!.email, 'virk@adonisjs.com')
    assert.isNull(providerUser1.user)
  })
})
test.group('Lucid Provider | findByRememberMeToken', (group) => {
  // Boot a fresh application and run setup once per group.
  group.before(async () => {
    app = await setupApplication()
    await setup(app)
  })

  group.after(async () => {
    await cleanup(app)
  })

  // Reset database state between tests.
  group.afterEach(async () => {
    await reset(app)
  })

  test('find a user using a token', async (assert) => {
    // 5 planned = before hook (1) + after hook (1) + 3 assertions below.
    assert.plan(5)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({
      username: 'virk',
      email: 'virk@adonisjs.com',
      rememberMeToken: '123',
    })
    // Second user without a token, to ensure the match is id+token scoped.
    await User.create({ username: 'nikk', email: 'nikk@adonisjs.com' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async (model) => assert.instanceOf(model, User))
    const providerUser = await lucidProvider.findByRememberMeToken(user.id, '123')
    assert.instanceOf(providerUser.user, User)
    assert.equal(providerUser.user!.username, 'virk')
    assert.equal(providerUser.user!.email, 'virk@adonisjs.com')
  })

  test("return null when user doesn't exists", async (assert) => {
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    const providerUser = await lucidProvider.findByRememberMeToken(1, '123')
    assert.isNull(providerUser.user)
  })

  test('return null when users exists but token is missing', async (assert) => {
    // 2 planned = before hook (1) + isNull (1). The after hook throws
    // because it must not fire when the token does not match.
    assert.plan(2)
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({ username: 'nikk', email: 'nikk@adonisjs.com' })
    await User.create({ username: 'virk', email: 'virk@adonisjs.com', rememberMeToken: '123' })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.before('findUser', async (query) => assert.exists(query))
    lucidProvider.after('findUser', async () => {
      throw new Error('not expected to be invoked')
    })
    // The token belongs to the other user, so this lookup must miss.
    const providerUser = await lucidProvider.findByRememberMeToken(user.id, '123')
    assert.isNull(providerUser.user)
  })

  test('use custom connection', async (assert) => {
    // Row exists on the default connection only, so the 'secondary'
    // connection lookup must be empty.
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({
      username: 'virk',
      email: 'virk@adonisjs.com',
      rememberMeToken: '123',
    })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.setConnection('secondary')
    const providerUser = await lucidProvider.findByRememberMeToken(user.id, '123')
    assert.isNull(providerUser.user)
  })

  test('use custom query client', async (assert) => {
    // Same as above, but passing a query-client instance instead of a name.
    const User = getUserModel(app.container.use('Adonis/Lucid/Orm').BaseModel)
    const user = await User.create({
      username: 'virk',
      email: 'virk@adonisjs.com',
      rememberMeToken: '123',
    })
    const lucidProvider = getLucidProvider(app, { model: async () => User })
    lucidProvider.setConnection(app.container.use('Adonis/Lucid/Database').connection('secondary'))
    const providerUser = await lucidProvider.findByRememberMeToken(user.id, '123')
    assert.isNull(providerUser.user)
  })
})
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as Models from "../models";
import * as Mappers from "../models/vaultsMappers";
import * as Parameters from "../models/parameters";
import { KeyVaultManagementClientContext } from "../keyVaultManagementClientContext";
/**
 * Class representing a Vaults.
 *
 * NOTE(review): this follows the Azure SDK generated-operations pattern —
 * each operation exposes a promise overload plus two callback overloads, all
 * dispatching to `sendOperationRequest` with a spec defined at the bottom of
 * this file. Confirm against the code generator before hand-editing.
 */
export class Vaults {
  // Service client context (credentials, subscription id, base URL).
  private readonly client: KeyVaultManagementClientContext;
  /**
   * Create a Vaults.
   * @param {KeyVaultManagementClientContext} client Reference to the service client.
   */
  constructor(client: KeyVaultManagementClientContext) {
    this.client = client;
  }
  /**
   * Create or update a key vault in the specified subscription.
   * @param resourceGroupName The name of the Resource Group to which the server belongs.
   * @param vaultName Name of the vault
   * @param parameters Parameters to create or update the vault
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsCreateOrUpdateResponse>
   */
  createOrUpdate(resourceGroupName: string, vaultName: string, parameters: Models.VaultCreateOrUpdateParameters, options?: msRest.RequestOptionsBase): Promise<Models.VaultsCreateOrUpdateResponse>;
  /**
   * @param resourceGroupName The name of the Resource Group to which the server belongs.
   * @param vaultName Name of the vault
   * @param parameters Parameters to create or update the vault
   * @param callback The callback
   */
  createOrUpdate(resourceGroupName: string, vaultName: string, parameters: Models.VaultCreateOrUpdateParameters, callback: msRest.ServiceCallback<Models.Vault>): void;
  /**
   * @param resourceGroupName The name of the Resource Group to which the server belongs.
   * @param vaultName Name of the vault
   * @param parameters Parameters to create or update the vault
   * @param options The optional parameters
   * @param callback The callback
   */
  createOrUpdate(resourceGroupName: string, vaultName: string, parameters: Models.VaultCreateOrUpdateParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Vault>): void;
  createOrUpdate(resourceGroupName: string, vaultName: string, parameters: Models.VaultCreateOrUpdateParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Vault>, callback?: msRest.ServiceCallback<Models.Vault>): Promise<Models.VaultsCreateOrUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        parameters,
        options
      },
      createOrUpdateOperationSpec,
      callback) as Promise<Models.VaultsCreateOrUpdateResponse>;
  }
  /**
   * Update a key vault in the specified subscription.
   * @param resourceGroupName The name of the Resource Group to which the server belongs.
   * @param vaultName Name of the vault
   * @param parameters Parameters to patch the vault
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsUpdateResponse>
   */
  update(resourceGroupName: string, vaultName: string, parameters: Models.VaultPatchParameters, options?: msRest.RequestOptionsBase): Promise<Models.VaultsUpdateResponse>;
  /**
   * @param resourceGroupName The name of the Resource Group to which the server belongs.
   * @param vaultName Name of the vault
   * @param parameters Parameters to patch the vault
   * @param callback The callback
   */
  update(resourceGroupName: string, vaultName: string, parameters: Models.VaultPatchParameters, callback: msRest.ServiceCallback<Models.Vault>): void;
  /**
   * @param resourceGroupName The name of the Resource Group to which the server belongs.
   * @param vaultName Name of the vault
   * @param parameters Parameters to patch the vault
   * @param options The optional parameters
   * @param callback The callback
   */
  update(resourceGroupName: string, vaultName: string, parameters: Models.VaultPatchParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Vault>): void;
  update(resourceGroupName: string, vaultName: string, parameters: Models.VaultPatchParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Vault>, callback?: msRest.ServiceCallback<Models.Vault>): Promise<Models.VaultsUpdateResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        parameters,
        options
      },
      updateOperationSpec,
      callback) as Promise<Models.VaultsUpdateResponse>;
  }
  /**
   * Deletes the specified Azure key vault.
   *
   * Named `deleteMethod` (rather than `delete`) — presumably a generator
   * convention to avoid the reserved word; confirm against other generated
   * clients in this SDK.
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName The name of the vault to delete
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  deleteMethod(resourceGroupName: string, vaultName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse>;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName The name of the vault to delete
   * @param callback The callback
   */
  deleteMethod(resourceGroupName: string, vaultName: string, callback: msRest.ServiceCallback<void>): void;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName The name of the vault to delete
   * @param options The optional parameters
   * @param callback The callback
   */
  deleteMethod(resourceGroupName: string, vaultName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<void>): void;
  deleteMethod(resourceGroupName: string, vaultName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        options
      },
      deleteMethodOperationSpec,
      callback);
  }
  /**
   * Gets the specified Azure key vault.
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName The name of the vault.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsGetResponse>
   */
  get(resourceGroupName: string, vaultName: string, options?: msRest.RequestOptionsBase): Promise<Models.VaultsGetResponse>;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName The name of the vault.
   * @param callback The callback
   */
  get(resourceGroupName: string, vaultName: string, callback: msRest.ServiceCallback<Models.Vault>): void;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName The name of the vault.
   * @param options The optional parameters
   * @param callback The callback
   */
  get(resourceGroupName: string, vaultName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.Vault>): void;
  get(resourceGroupName: string, vaultName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.Vault>, callback?: msRest.ServiceCallback<Models.Vault>): Promise<Models.VaultsGetResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        options
      },
      getOperationSpec,
      callback) as Promise<Models.VaultsGetResponse>;
  }
  /**
   * Update access policies in a key vault in the specified subscription.
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName Name of the vault
   * @param operationKind Name of the operation. Possible values include: 'add', 'replace', 'remove'
   * @param parameters Access policy to merge into the vault
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsUpdateAccessPolicyResponse>
   */
  updateAccessPolicy(resourceGroupName: string, vaultName: string, operationKind: Models.AccessPolicyUpdateKind, parameters: Models.VaultAccessPolicyParameters, options?: msRest.RequestOptionsBase): Promise<Models.VaultsUpdateAccessPolicyResponse>;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName Name of the vault
   * @param operationKind Name of the operation. Possible values include: 'add', 'replace', 'remove'
   * @param parameters Access policy to merge into the vault
   * @param callback The callback
   */
  updateAccessPolicy(resourceGroupName: string, vaultName: string, operationKind: Models.AccessPolicyUpdateKind, parameters: Models.VaultAccessPolicyParameters, callback: msRest.ServiceCallback<Models.VaultAccessPolicyParameters>): void;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param vaultName Name of the vault
   * @param operationKind Name of the operation. Possible values include: 'add', 'replace', 'remove'
   * @param parameters Access policy to merge into the vault
   * @param options The optional parameters
   * @param callback The callback
   */
  updateAccessPolicy(resourceGroupName: string, vaultName: string, operationKind: Models.AccessPolicyUpdateKind, parameters: Models.VaultAccessPolicyParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.VaultAccessPolicyParameters>): void;
  updateAccessPolicy(resourceGroupName: string, vaultName: string, operationKind: Models.AccessPolicyUpdateKind, parameters: Models.VaultAccessPolicyParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.VaultAccessPolicyParameters>, callback?: msRest.ServiceCallback<Models.VaultAccessPolicyParameters>): Promise<Models.VaultsUpdateAccessPolicyResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        vaultName,
        operationKind,
        parameters,
        options
      },
      updateAccessPolicyOperationSpec,
      callback) as Promise<Models.VaultsUpdateAccessPolicyResponse>;
  }
  /**
   * The List operation gets information about the vaults associated with the subscription and within
   * the specified resource group.
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListByResourceGroupResponse>
   */
  listByResourceGroup(resourceGroupName: string, options?: Models.VaultsListByResourceGroupOptionalParams): Promise<Models.VaultsListByResourceGroupResponse>;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  /**
   * @param resourceGroupName The name of the Resource Group to which the vault belongs.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroup(resourceGroupName: string, options: Models.VaultsListByResourceGroupOptionalParams, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  listByResourceGroup(resourceGroupName: string, options?: Models.VaultsListByResourceGroupOptionalParams | msRest.ServiceCallback<Models.VaultListResult>, callback?: msRest.ServiceCallback<Models.VaultListResult>): Promise<Models.VaultsListByResourceGroupResponse> {
    return this.client.sendOperationRequest(
      {
        resourceGroupName,
        options
      },
      listByResourceGroupOperationSpec,
      callback) as Promise<Models.VaultsListByResourceGroupResponse>;
  }
  /**
   * The List operation gets information about the vaults associated with the subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListBySubscriptionResponse>
   */
  listBySubscription(options?: Models.VaultsListBySubscriptionOptionalParams): Promise<Models.VaultsListBySubscriptionResponse>;
  /**
   * @param callback The callback
   */
  listBySubscription(callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  listBySubscription(options: Models.VaultsListBySubscriptionOptionalParams, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  listBySubscription(options?: Models.VaultsListBySubscriptionOptionalParams | msRest.ServiceCallback<Models.VaultListResult>, callback?: msRest.ServiceCallback<Models.VaultListResult>): Promise<Models.VaultsListBySubscriptionResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listBySubscriptionOperationSpec,
      callback) as Promise<Models.VaultsListBySubscriptionResponse>;
  }
  /**
   * Gets information about the deleted vaults in a subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListDeletedResponse>
   */
  listDeleted(options?: msRest.RequestOptionsBase): Promise<Models.VaultsListDeletedResponse>;
  /**
   * @param callback The callback
   */
  listDeleted(callback: msRest.ServiceCallback<Models.DeletedVaultListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  listDeleted(options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.DeletedVaultListResult>): void;
  listDeleted(options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.DeletedVaultListResult>, callback?: msRest.ServiceCallback<Models.DeletedVaultListResult>): Promise<Models.VaultsListDeletedResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listDeletedOperationSpec,
      callback) as Promise<Models.VaultsListDeletedResponse>;
  }
  /**
   * Gets the deleted Azure key vault.
   * @param vaultName The name of the vault.
   * @param location The location of the deleted vault.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsGetDeletedResponse>
   */
  getDeleted(vaultName: string, location: string, options?: msRest.RequestOptionsBase): Promise<Models.VaultsGetDeletedResponse>;
  /**
   * @param vaultName The name of the vault.
   * @param location The location of the deleted vault.
   * @param callback The callback
   */
  getDeleted(vaultName: string, location: string, callback: msRest.ServiceCallback<Models.DeletedVault>): void;
  /**
   * @param vaultName The name of the vault.
   * @param location The location of the deleted vault.
   * @param options The optional parameters
   * @param callback The callback
   */
  getDeleted(vaultName: string, location: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.DeletedVault>): void;
  getDeleted(vaultName: string, location: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.DeletedVault>, callback?: msRest.ServiceCallback<Models.DeletedVault>): Promise<Models.VaultsGetDeletedResponse> {
    return this.client.sendOperationRequest(
      {
        vaultName,
        location,
        options
      },
      getDeletedOperationSpec,
      callback) as Promise<Models.VaultsGetDeletedResponse>;
  }
  /**
   * Permanently deletes the specified vault. aka Purges the deleted Azure key vault.
   * @param vaultName The name of the soft-deleted vault.
   * @param location The location of the soft-deleted vault.
   * @param [options] The optional parameters
   * @returns Promise<msRest.RestResponse>
   */
  purgeDeleted(vaultName: string, location: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse> {
    // Long-running operation: kick off the purge via beginPurgeDeleted and
    // block until the LRO poller reports completion.
    return this.beginPurgeDeleted(vaultName,location,options)
      .then(lroPoller => lroPoller.pollUntilFinished());
  }
  /**
   * The List operation gets information about the vaults associated with the subscription.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListResponse>
   */
  list(options?: Models.VaultsListOptionalParams): Promise<Models.VaultsListResponse>;
  /**
   * @param callback The callback
   */
  list(callback: msRest.ServiceCallback<Models.ResourceListResult>): void;
  /**
   * @param options The optional parameters
   * @param callback The callback
   */
  list(options: Models.VaultsListOptionalParams, callback: msRest.ServiceCallback<Models.ResourceListResult>): void;
  list(options?: Models.VaultsListOptionalParams | msRest.ServiceCallback<Models.ResourceListResult>, callback?: msRest.ServiceCallback<Models.ResourceListResult>): Promise<Models.VaultsListResponse> {
    return this.client.sendOperationRequest(
      {
        options
      },
      listOperationSpec,
      callback) as Promise<Models.VaultsListResponse>;
  }
  /**
   * Checks that the vault name is valid and is not already in use.
   * @param vaultName The name-availability request parameters (see
   * Models.VaultCheckNameAvailabilityParameters) — not a bare vault name string.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsCheckNameAvailabilityResponse>
   */
  checkNameAvailability(vaultName: Models.VaultCheckNameAvailabilityParameters, options?: msRest.RequestOptionsBase): Promise<Models.VaultsCheckNameAvailabilityResponse>;
  /**
   * @param vaultName The name-availability request parameters (see
   * Models.VaultCheckNameAvailabilityParameters).
   * @param callback The callback
   */
  checkNameAvailability(vaultName: Models.VaultCheckNameAvailabilityParameters, callback: msRest.ServiceCallback<Models.CheckNameAvailabilityResult>): void;
  /**
   * @param vaultName The name-availability request parameters (see
   * Models.VaultCheckNameAvailabilityParameters).
   * @param options The optional parameters
   * @param callback The callback
   */
  checkNameAvailability(vaultName: Models.VaultCheckNameAvailabilityParameters, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CheckNameAvailabilityResult>): void;
  checkNameAvailability(vaultName: Models.VaultCheckNameAvailabilityParameters, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CheckNameAvailabilityResult>, callback?: msRest.ServiceCallback<Models.CheckNameAvailabilityResult>): Promise<Models.VaultsCheckNameAvailabilityResponse> {
    return this.client.sendOperationRequest(
      {
        vaultName,
        options
      },
      checkNameAvailabilityOperationSpec,
      callback) as Promise<Models.VaultsCheckNameAvailabilityResponse>;
  }
  /**
   * Permanently deletes the specified vault. aka Purges the deleted Azure key vault.
   * Returns an LRO poller; use purgeDeleted to wait for completion.
   * @param vaultName The name of the soft-deleted vault.
   * @param location The location of the soft-deleted vault.
   * @param [options] The optional parameters
   * @returns Promise<msRestAzure.LROPoller>
   */
  beginPurgeDeleted(vaultName: string, location: string, options?: msRest.RequestOptionsBase): Promise<msRestAzure.LROPoller> {
    return this.client.sendLRORequest(
      {
        vaultName,
        location,
        options
      },
      beginPurgeDeletedOperationSpec,
      options);
  }
  /**
   * The List operation gets information about the vaults associated with the subscription and within
   * the specified resource group.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListByResourceGroupNextResponse>
   */
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.VaultsListByResourceGroupNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listByResourceGroupNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  listByResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.VaultListResult>, callback?: msRest.ServiceCallback<Models.VaultListResult>): Promise<Models.VaultsListByResourceGroupNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listByResourceGroupNextOperationSpec,
      callback) as Promise<Models.VaultsListByResourceGroupNextResponse>;
  }
  /**
   * The List operation gets information about the vaults associated with the subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListBySubscriptionNextResponse>
   */
  listBySubscriptionNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.VaultsListBySubscriptionNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listBySubscriptionNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listBySubscriptionNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.VaultListResult>): void;
  listBySubscriptionNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.VaultListResult>, callback?: msRest.ServiceCallback<Models.VaultListResult>): Promise<Models.VaultsListBySubscriptionNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listBySubscriptionNextOperationSpec,
      callback) as Promise<Models.VaultsListBySubscriptionNextResponse>;
  }
  /**
   * Gets information about the deleted vaults in a subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListDeletedNextResponse>
   */
  listDeletedNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.VaultsListDeletedNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listDeletedNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.DeletedVaultListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listDeletedNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.DeletedVaultListResult>): void;
  listDeletedNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.DeletedVaultListResult>, callback?: msRest.ServiceCallback<Models.DeletedVaultListResult>): Promise<Models.VaultsListDeletedNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listDeletedNextOperationSpec,
      callback) as Promise<Models.VaultsListDeletedNextResponse>;
  }
  /**
   * The List operation gets information about the vaults associated with the subscription.
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param [options] The optional parameters
   * @returns Promise<Models.VaultsListNextResponse>
   */
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.VaultsListNextResponse>;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param callback The callback
   */
  listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ResourceListResult>): void;
  /**
   * @param nextPageLink The NextLink from the previous successful call to List operation.
   * @param options The optional parameters
   * @param callback The callback
   */
  listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ResourceListResult>): void;
  listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ResourceListResult>, callback?: msRest.ServiceCallback<Models.ResourceListResult>): Promise<Models.VaultsListNextResponse> {
    return this.client.sendOperationRequest(
      {
        nextPageLink,
        options
      },
      listNextOperationSpec,
      callback) as Promise<Models.VaultsListNextResponse>;
  }
}
// Operation Specifications
// Shared serializer over the generated mappers; referenced by every operation spec below.
const serializer = new msRest.Serializer(Mappers);
// Spec for Vaults.createOrUpdate — PUT on the vault resource.
const createOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.vaultName0,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      // Spread the generated mapper and mark the request body as required.
      ...Mappers.VaultCreateOrUpdateParameters,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Vault
    },
    201: {
      bodyMapper: Mappers.Vault
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.update — PATCH on the vault resource.
const updateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.vaultName0,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      // Spread the generated mapper and mark the request body as required.
      ...Mappers.VaultPatchParameters,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.Vault
    },
    201: {
      bodyMapper: Mappers.Vault
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.deleteMethod — DELETE on the vault resource (empty 200 body).
const deleteMethodOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.vaultName1,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.get — GET on the vault resource.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.vaultName1,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.Vault
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.updateAccessPolicy — PUT on .../accessPolicies/{operationKind}.
const updateAccessPolicyOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults/{vaultName}/accessPolicies/{operationKind}",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.vaultName0,
    Parameters.operationKind,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "parameters",
    mapper: {
      // Spread the generated mapper and mark the request body as required.
      ...Mappers.VaultAccessPolicyParameters,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.VaultAccessPolicyParameters
    },
    201: {
      bodyMapper: Mappers.VaultAccessPolicyParameters
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listByResourceGroup — GET vaults in a resource group ($top supported).
const listByResourceGroupOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.KeyVault/vaults",
  urlParameters: [
    Parameters.resourceGroupName,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.top,
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.VaultListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listBySubscription — GET vaults in the subscription ($top supported).
const listBySubscriptionOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.KeyVault/vaults",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.top,
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.VaultListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listDeleted — GET soft-deleted vaults in the subscription.
const listDeletedOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.KeyVault/deletedVaults",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.DeletedVaultListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.getDeleted — GET a soft-deleted vault by location and name.
const getDeletedOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.KeyVault/locations/{location}/deletedVaults/{vaultName}",
  urlParameters: [
    Parameters.vaultName1,
    Parameters.location,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.DeletedVault
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.list — GET via the generic resources endpoint
// ($filter/$top supported; note the distinct apiVersion1).
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resources",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.filter,
    Parameters.top,
    Parameters.apiVersion1
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ResourceListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.checkNameAvailability — POST; the "vaultName" argument is
// serialized as the request body (VaultCheckNameAvailabilityParameters).
const checkNameAvailabilityOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.KeyVault/checkNameAvailability",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "vaultName",
    mapper: {
      // Spread the generated mapper and mark the request body as required.
      ...Mappers.VaultCheckNameAvailabilityParameters,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.CheckNameAvailabilityResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.beginPurgeDeleted — POST purge; 202 indicates the LRO was accepted.
const beginPurgeDeletedOperationSpec: msRest.OperationSpec = {
  httpMethod: "POST",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.KeyVault/locations/{location}/deletedVaults/{vaultName}/purge",
  urlParameters: [
    Parameters.vaultName1,
    Parameters.location,
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.apiVersion0
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    202: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listByResourceGroupNext — follows an absolute nextLink page URL.
const listByResourceGroupNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.VaultListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listBySubscriptionNext — follows an absolute nextLink page URL.
const listBySubscriptionNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.VaultListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listDeletedNext — follows an absolute nextLink page URL.
const listDeletedNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.DeletedVaultListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// Spec for Vaults.listNext — follows an absolute nextLink page URL.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.ResourceListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
import { PhysicalConstants } from './PhysicalConstants';
import { Units } from './Units';
/**
* AstronomicalConstants - class representing the most important astronomical constants.
*
* @author <b>Mariusz Gromada</b><br>
* <a href="mailto:mariuszgromada.org@gmail.com">mariuszgromada.org@gmail.com</a><br>
* <a href="http://github.com/mariuszgromada/MathParser.org-mXparser" target="_blank">mXparser on GitHub</a><br>
*
* @version 4.0.0
* @class
*/
export class AstronomicalConstants {
    /**
     * Light year
     */
    // Lazy static init (transpiler `_$LI$` accessor): the field starts undefined,
    // so `== null` is true until the first call computes LIGHT_SPEED * JULIAN_YEAR.
    public static LIGHT_YEAR: number; public static LIGHT_YEAR_$LI$(): number { if (AstronomicalConstants.LIGHT_YEAR == null) { AstronomicalConstants.LIGHT_YEAR = PhysicalConstants.LIGHT_SPEED_$LI$() * Units.JULIAN_YEAR_$LI$(); } return AstronomicalConstants.LIGHT_YEAR; }
    /**
     * Astronomical unit
     */
    // Lazily initialized on first `_$LI$()` call; 1.495978707e11 metres.
    public static ASTRONOMICAL_UNIT: number; public static ASTRONOMICAL_UNIT_$LI$(): number { if (AstronomicalConstants.ASTRONOMICAL_UNIT == null) { AstronomicalConstants.ASTRONOMICAL_UNIT = 1.495978707E11 * Units.METRE; } return AstronomicalConstants.ASTRONOMICAL_UNIT; }
    /**
     * Parsec
     */
    // Lazily initialized; 206264.806247096 astronomical units.
    public static PARSEC: number; public static PARSEC_$LI$(): number { if (AstronomicalConstants.PARSEC == null) { AstronomicalConstants.PARSEC = 206264.806247096 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.PARSEC; }
    /**
     * Kiloparsec
     */
    // Lazily initialized; Units.KILO multiple of PARSEC (presumably 1000x — see Units).
    public static KILOPARSEC: number; public static KILOPARSEC_$LI$(): number { if (AstronomicalConstants.KILOPARSEC == null) { AstronomicalConstants.KILOPARSEC = Units.KILO * AstronomicalConstants.PARSEC_$LI$(); } return AstronomicalConstants.KILOPARSEC; }
/**
* Earth equatorial radius
*/
public static EARTH_RADIUS_EQUATORIAL: number; public static EARTH_RADIUS_EQUATORIAL_$LI$(): number { if (AstronomicalConstants.EARTH_RADIUS_EQUATORIAL == null) { AstronomicalConstants.EARTH_RADIUS_EQUATORIAL = 6378.137 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.EARTH_RADIUS_EQUATORIAL; }
/**
* Earth polar radius
*/
public static EARTH_RADIUS_POLAR: number; public static EARTH_RADIUS_POLAR_$LI$(): number { if (AstronomicalConstants.EARTH_RADIUS_POLAR == null) { AstronomicalConstants.EARTH_RADIUS_POLAR = 6356.7523 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.EARTH_RADIUS_POLAR; }
/**
* Earth mean radius
*/
public static EARTH_RADIUS_MEAN: number; public static EARTH_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.EARTH_RADIUS_MEAN == null) { AstronomicalConstants.EARTH_RADIUS_MEAN = 6371.0088 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.EARTH_RADIUS_MEAN; }
/**
* Earth mass
*/
public static EARTH_MASS: number; public static EARTH_MASS_$LI$(): number { if (AstronomicalConstants.EARTH_MASS == null) { AstronomicalConstants.EARTH_MASS = 5.9722 * Units.YOTTA * Units.KILOGRAM; } return AstronomicalConstants.EARTH_MASS; }
/**
* Earth semi-major axis
*/
public static EARTH_SEMI_MAJOR_AXIS: number; public static EARTH_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.EARTH_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.EARTH_SEMI_MAJOR_AXIS = 1.000001018 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.EARTH_SEMI_MAJOR_AXIS; }
/**
* Moon mean radius
*/
public static MOON_RADIUS_MEAN: number; public static MOON_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.MOON_RADIUS_MEAN == null) { AstronomicalConstants.MOON_RADIUS_MEAN = 1737.1 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.MOON_RADIUS_MEAN; }
/**
* Moon mass
*/
public static MOON_MASS: number; public static MOON_MASS_$LI$(): number { if (AstronomicalConstants.MOON_MASS == null) { AstronomicalConstants.MOON_MASS = 0.012300037 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.MOON_MASS; }
/**
* Moon semi-major axis
*/
public static MONN_SEMI_MAJOR_AXIS: number; public static MONN_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.MONN_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.MONN_SEMI_MAJOR_AXIS = 384399 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.MONN_SEMI_MAJOR_AXIS; }
/**
* Solar radius
*/
public static SOLAR_RADIUS: number; public static SOLAR_RADIUS_$LI$(): number { if (AstronomicalConstants.SOLAR_RADIUS == null) { AstronomicalConstants.SOLAR_RADIUS = 695700 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.SOLAR_RADIUS; }
/**
* Solar mass
*/
public static SOLAR_MASS: number; public static SOLAR_MASS_$LI$(): number { if (AstronomicalConstants.SOLAR_MASS == null) { AstronomicalConstants.SOLAR_MASS = 332946.0487 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.SOLAR_MASS; }
/**
* Mercury radius
*/
public static MERCURY_RADIUS_MEAN: number; public static MERCURY_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.MERCURY_RADIUS_MEAN == null) { AstronomicalConstants.MERCURY_RADIUS_MEAN = 2439.7 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.MERCURY_RADIUS_MEAN; }
/**
* Mercury mass
*/
public static MERCURY_MASS: number; public static MERCURY_MASS_$LI$(): number { if (AstronomicalConstants.MERCURY_MASS == null) { AstronomicalConstants.MERCURY_MASS = 0.0553 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.MERCURY_MASS; }
/**
* Mercury semi-major axis
*/
public static MERCURY_SEMI_MAJOR_AXIS: number; public static MERCURY_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.MERCURY_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.MERCURY_SEMI_MAJOR_AXIS = 0.387098 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.MERCURY_SEMI_MAJOR_AXIS; }
/**
* Venus radius
*/
public static VENUS_RADIUS_MEAN: number; public static VENUS_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.VENUS_RADIUS_MEAN == null) { AstronomicalConstants.VENUS_RADIUS_MEAN = 6051.8 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.VENUS_RADIUS_MEAN; }
/**
* Venus mass
*/
public static VENUS_MASS: number; public static VENUS_MASS_$LI$(): number { if (AstronomicalConstants.VENUS_MASS == null) { AstronomicalConstants.VENUS_MASS = 0.815 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.VENUS_MASS; }
/**
* Venus semi-major axis
*/
public static VENUS_SEMI_MAJOR_AXIS: number; public static VENUS_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.VENUS_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.VENUS_SEMI_MAJOR_AXIS = 0.723332 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.VENUS_SEMI_MAJOR_AXIS; }
/**
* Mars radius
*/
public static MARS_RADIUS_MEAN: number; public static MARS_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.MARS_RADIUS_MEAN == null) { AstronomicalConstants.MARS_RADIUS_MEAN = 3389.5 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.MARS_RADIUS_MEAN; }
/**
* Mars mass
*/
public static MARS_MASS: number; public static MARS_MASS_$LI$(): number { if (AstronomicalConstants.MARS_MASS == null) { AstronomicalConstants.MARS_MASS = 0.107 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.MARS_MASS; }
/**
* Mars semi-major axis
*/
public static MARS_SEMI_MAJOR_AXIS: number; public static MARS_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.MARS_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.MARS_SEMI_MAJOR_AXIS = 1.523679 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.MARS_SEMI_MAJOR_AXIS; }
/**
* Jupiter radius
*/
public static JUPITER_RADIUS_MEAN: number; public static JUPITER_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.JUPITER_RADIUS_MEAN == null) { AstronomicalConstants.JUPITER_RADIUS_MEAN = 69911 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.JUPITER_RADIUS_MEAN; }
/**
* Jupiter mass
*/
public static JUPITER_MASS: number; public static JUPITER_MASS_$LI$(): number { if (AstronomicalConstants.JUPITER_MASS == null) { AstronomicalConstants.JUPITER_MASS = 317.8 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.JUPITER_MASS; }
/**
* Jupiter semi-major axis
*/
public static JUPITER_SEMI_MAJOR_AXIS: number; public static JUPITER_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.JUPITER_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.JUPITER_SEMI_MAJOR_AXIS = 5.2026 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.JUPITER_SEMI_MAJOR_AXIS; }
/**
* Saturn radius
*/
public static SATURN_RADIUS_MEAN: number; public static SATURN_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.SATURN_RADIUS_MEAN == null) { AstronomicalConstants.SATURN_RADIUS_MEAN = 58232 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.SATURN_RADIUS_MEAN; }
/**
* Saturn mass
*/
public static SATURN_MASS: number; public static SATURN_MASS_$LI$(): number { if (AstronomicalConstants.SATURN_MASS == null) { AstronomicalConstants.SATURN_MASS = 95.159 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.SATURN_MASS; }
/**
* Saturn semi-major axis
*/
public static SATURN_SEMI_MAJOR_AXIS: number; public static SATURN_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.SATURN_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.SATURN_SEMI_MAJOR_AXIS = 9.5549 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.SATURN_SEMI_MAJOR_AXIS; }
/**
* Uranus radius
*/
public static URANUS_RADIUS_MEAN: number; public static URANUS_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.URANUS_RADIUS_MEAN == null) { AstronomicalConstants.URANUS_RADIUS_MEAN = 25362 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.URANUS_RADIUS_MEAN; }
/**
* Uranus mass
*/
public static URANUS_MASS: number; public static URANUS_MASS_$LI$(): number { if (AstronomicalConstants.URANUS_MASS == null) { AstronomicalConstants.URANUS_MASS = 14.536 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.URANUS_MASS; }
/**
* Uranus semi-major axis
*/
public static URANUS_SEMI_MAJOR_AXIS: number; public static URANUS_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.URANUS_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.URANUS_SEMI_MAJOR_AXIS = 19.2184 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.URANUS_SEMI_MAJOR_AXIS; }
/**
* Neptune radius
*/
public static NEPTUNE_RADIUS_MEAN: number; public static NEPTUNE_RADIUS_MEAN_$LI$(): number { if (AstronomicalConstants.NEPTUNE_RADIUS_MEAN == null) { AstronomicalConstants.NEPTUNE_RADIUS_MEAN = 24622 * Units.KILOMETRE_$LI$(); } return AstronomicalConstants.NEPTUNE_RADIUS_MEAN; }
/**
* Neptune mass
*/
public static NEPTUNE_MASS: number; public static NEPTUNE_MASS_$LI$(): number { if (AstronomicalConstants.NEPTUNE_MASS == null) { AstronomicalConstants.NEPTUNE_MASS = 17.147 * AstronomicalConstants.EARTH_MASS_$LI$(); } return AstronomicalConstants.NEPTUNE_MASS; }
/**
* Neptune semi-major axis
*/
public static NEPTUNE_SEMI_MAJOR_AXIS: number; public static NEPTUNE_SEMI_MAJOR_AXIS_$LI$(): number { if (AstronomicalConstants.NEPTUNE_SEMI_MAJOR_AXIS == null) { AstronomicalConstants.NEPTUNE_SEMI_MAJOR_AXIS = 30.110387 * AstronomicalConstants.ASTRONOMICAL_UNIT_$LI$(); } return AstronomicalConstants.NEPTUNE_SEMI_MAJOR_AXIS; }
}
AstronomicalConstants["__class"] = "org.mariuszgromada.math.mxparser.mathcollection.AstronomicalConstants"; | the_stack |
import * as React from "react";
import styles from "./TreeOrgChart.module.scss";
import { ITreeOrgChartProps } from "./ITreeOrgChartProps";
import { ITreeOrgChartState } from "./ITreeOrgChartState";
import SortableTree from "react-sortable-tree";
import "react-sortable-tree/style.css";
import {
IPersonaSharedProps,
Persona,
PersonaSize
} from "office-ui-fabric-react/lib/Persona";
import { IconButton } from "office-ui-fabric-react/lib/Button";
import { WebPartTitle } from "@pnp/spfx-controls-react/lib/WebPartTitle";
import SPService from "../../../services/SPServices";
import { ITreeData } from "./ITreeData";
import {
Spinner,
SpinnerSize
} from "office-ui-fabric-react/lib/components/Spinner";
import { DisplayMode, Environment, EnvironmentType } from "@microsoft/sp-core-library";
import { PeoplePicker, PrincipalType } from "@pnp/spfx-controls-react/lib/PeoplePicker";
import { SPComponentLoader } from '@microsoft/sp-loader';
import * as strings from 'TreeOrgChartWebPartStrings';
import GraphServices, { IGraphUser } from "../../../services/GraphService";
import GraphService from "../../../services/GraphService";
/**
 * View modes of the organization-chart web part.
 * The numeric values are persisted in web-part properties, so they must
 * remain stable across versions.
 */
export enum TreeOrgChartType {
  MyTeam = 1,
  CompanyHierarchy = 2,
  ShowOtherTeam = 4
}
// Component id of the shared SPFx "Live Persona Card" library component.
const LIVE_PERSONA_COMPONENT_ID: string = '914330ee-2df2-4f6e-a858-30c23a812408';
/**
 * TreeOrgChart - renders an organization chart as a collapsible tree.
 *
 * Depending on `props.viewType` the chart shows the full company
 * hierarchy, the current user's team, or the team of an explicitly
 * selected leader. Person data is read from Microsoft Graph
 * (manager / direct reports); the company view additionally resolves
 * the current user's manager chain from the SharePoint user-profile
 * service.
 */
export default class TreeOrgChart extends React.Component<
  ITreeOrgChartProps,
  ITreeOrgChartState
> {
  private treeData: ITreeData[];
  private SPService: SPService;
  private GraphService: GraphService;

  constructor(props) {
    super(props);
    this.SPService = new SPService(this.props.context);
    // Consistency fix: `GraphServices` and `GraphService` are the same
    // default export imported twice; use the singular alias throughout.
    this.GraphService = new GraphService(this.props.context);
    this.state = {
      treeData: [],
      isLoading: true
    };
  }

  // SortableTree change handler (tracks expand/collapse state).
  private handleTreeOnChange(treeData) {
    this.setState({ treeData });
  }

  // Rebuild the chart whenever a property that affects its shape changes.
  public async componentDidUpdate(
    prevProps: ITreeOrgChartProps,
    prevState: ITreeOrgChartState
  ) {
    if (
      this.props.viewType !== prevProps.viewType ||
      this.props.maxLevels !== prevProps.maxLevels ||
      this.props.teamLeader !== prevProps.teamLeader ||
      this.props.excludefilter !== prevProps.excludefilter ||
      this.props.filter !== prevProps.filter ||
      this.props.detailBehavoir !== prevProps.detailBehavoir
    ) {
      await this.loadOrgchart();
    }
  }

  public async componentDidMount() {
    // The Live Persona Card shared library is only available on a real
    // SharePoint page, not in the local workbench.
    if (Environment.type !== EnvironmentType.Local) {
      const sharedLibrary = await this._loadSPComponentById(
        LIVE_PERSONA_COMPONENT_ID
      );
      const livePersonaCard: any = sharedLibrary.LivePersonaCard;
      this.setState({ livePersonaCard: livePersonaCard });
    }
    await this.loadOrgchart();
  }

  /**
   * Loads a shared SPFx component (the Live Persona Card) by id.
   * Propagates loader failures to the caller.
   */
  private async _loadSPComponentById(componentId: string): Promise<any> {
    try {
      const component: any = await SPComponentLoader.loadComponentById(
        componentId
      );
      return component;
    } catch (error) {
      // Bug fix: the rejection was created but never returned, so this
      // method resolved with `undefined` and produced an unhandled
      // promise rejection. Propagate the failure instead.
      return Promise.reject(error);
    }
  }

  // Wraps the default persona card in the Live Persona Card host component.
  private livePersonaCard(user: IGraphUser): JSX.Element {
    return React.createElement(
      this.state.livePersonaCard,
      {
        serviceScope: this.props.context.serviceScope,
        upn: user.userPrincipalName,
        onCardOpen: () => {
          console.log('LivePersonaCard Open');
        },
        onCardClose: () => {
          console.log('LivePersonaCard Close');
        },
      }, this.buildDefaultPersonaCard(user)
    );
  }

  /*
  // Load Organization Chart
  */
  public async loadOrgchart() {
    this.setState({ treeData: [], isLoading: true });
    const currentUser = this.props.context.pageContext.user.loginName;
    let currentUserProperties = null;
    this.treeData = [];
    // Test if show only my Team or All Organization Chart
    switch (this.props.viewType) {
      case TreeOrgChartType.CompanyHierarchy:
        // SharePoint profile APIs expect a claims-encoded login name.
        const spcurrentlogin = `i:0#.f|membership|${currentUser}`;
        currentUserProperties = await this.SPService.getUserProperties(
          spcurrentlogin
        );
        const treeManagers = await this.buildOrganizationChart(
          currentUserProperties
        );
        if (treeManagers) this.treeData.push(treeManagers);
        break;
      case TreeOrgChartType.MyTeam:
        const myteam = await this.buildMyTeamOrganizationChart(
          currentUser
        );
        if (myteam)
          this.treeData = [{ ...myteam }];
        break;
      case TreeOrgChartType.ShowOtherTeam:
        if (this.props.teamLeader && this.props.teamLeader.length > 0) {
          const otherteam = await this.buildTeamLeaderOrganizationChart(
            this.props.teamLeader
          );
          if (otherteam)
            this.treeData = [{ ...otherteam }];
        }
        break;
    }
    this.setState({ treeData: this.treeData, isLoading: false });
  }

  /*
   Build Organization Chart from currentUser
   @parm : currentUserProperties
  */
  public async buildOrganizationChart(currentUserProperties: any) {
    // ExtendedManagers[0] is the topmost manager in the user's chain;
    // the chart is rooted there and expanded downwards.
    let treeManagers: ITreeData | null = null;
    if (
      currentUserProperties.ExtendedManagers &&
      currentUserProperties.ExtendedManagers.length > 0
    ) {
      const upn: string | undefined = this.claimUserToUPN(currentUserProperties.ExtendedManagers[0]);
      if (upn) {
        treeManagers = await this.getUsers(
          upn
        );
      }
    }
    return treeManagers;
  }

  /**
   * Extracts the UPN from a claims-encoded login name
   * (e.g. "i:0#.f|membership|user@tenant.com" -> "user@tenant.com").
   * Returns undefined when the input is not a claims string with a UPN.
   */
  private claimUserToUPN(claim: string): string | undefined {
    if (!claim) { return undefined; }
    const claimuser: string[] = claim.split('|');
    if (claimuser.length > 1) {
      const upn = claimuser[claimuser.length - 1];
      if (upn && upn.length > 0 && upn.indexOf('@') !== -1) {
        return upn;
      }
    }
    return undefined;
  }

  // Chooses between the Live Persona Card (if loaded and enabled via
  // detailBehavoir) and the plain Fluent UI persona.
  public buildPersonaCard(user: IGraphUser): JSX.Element {
    if (this.props.detailBehavoir) {
      if (this.state.livePersonaCard) {
        return (this.livePersonaCard(user));
      }
      return this.buildDefaultPersonaCard(user);
    } else {
      return this.buildDefaultPersonaCard(user);
    }
  }

  // Builds a plain Fluent UI <Persona> for the given Graph user.
  public buildDefaultPersonaCard(user: IGraphUser): JSX.Element {
    let spUser: IPersonaSharedProps = {};
    let imageInitials: string[] = user.displayName ? user.displayName.split(" ") : [];
    //https://graph.microsoft.com/v1.0/users/${upn}/photo/$value
    // Persona Card Properties
    spUser.imageUrl = user.userPrincipalName ? `/_layouts/15/userphoto.aspx?size=L&username=${user.userPrincipalName}` : undefined;
    // Fallback initials: first letter of the first two name parts.
    spUser.imageInitials = imageInitials && imageInitials.length > 0 ? `${imageInitials[0]
      .substring(0, 1)
      .toUpperCase()}${imageInitials[1] ? imageInitials[1].substring(0, 1).toUpperCase() : ''}` : '';
    spUser.text = user.displayName;
    spUser.tertiaryText = user.mail;
    spUser.secondaryText = user.jobTitle;
    // PersonaCard component
    return (
      <Persona
        {...spUser}
        hidePersonaDetails={false}
        size={PersonaSize.size40}
      />
    );
  }

  // Builds the tree node for `upn` and recursively loads direct reports.
  private async getUsers(upn: string): Promise<ITreeData | null> {
    const managerUser = await this.GraphService.getUser(upn);
    const person = this.buildPersonaCard(managerUser);
    if (managerUser.userPrincipalName) {
      return ({
        title: person,
        expanded: true,
        children: await this.getDirectReportsUsers(managerUser.userPrincipalName)
      });
    } else {
      return { title: person };
    }
  }

  // Recursively loads direct reports down to props.maxLevels.
  private async getDirectReportsUsers(upn?: string, level: number = 1, expanded: boolean = false): Promise<ITreeData[] | null> {
    if (!upn) { return null; }
    const directReportsUser = await this.GraphService.getUserDirectReports(upn,this.props.excludefilter,this.props.filter);
    //this is already level 1
    if (directReportsUser && directReportsUser.length > 0) {
      return await Promise.all(directReportsUser.map(async (dr) => {
        const children = ((level +1) <= this.props.maxLevels) ? await this.getDirectReportsUsers(dr.userPrincipalName, level + 1) : null;
        return ({
          title: this.buildPersonaCard(dr),
          expanded: expanded,
          children: children
        });
      }));
    }
    return null;
  }

  //buildTeamLeaderOrganizationChart
  private async buildTeamLeaderOrganizationChart(upn: string): Promise<ITreeData | null> {
    // Accept either a claims-encoded login name or a plain UPN.
    const tmpupn: string | undefined = this.claimUserToUPN(upn);
    return await this.getUsers(tmpupn ? tmpupn : upn);
  }

  /*
  Build My Team Organization Chart
  @parm: currentUserProperties
  */
  private async buildMyTeamOrganizationChart(upn: string): Promise<ITreeData | null> {
    // Root the team view at the user's manager when one exists;
    // otherwise the user is the root (e.g. the CEO has no manager).
    const mymanager = await this.GraphService.getUserManger(upn);
    if (mymanager && mymanager.userPrincipalName) {
      return await this.getUsers(mymanager.userPrincipalName);
    }
    return await this.getUsers(upn);
  }

  // Render
  public render(): React.ReactElement<ITreeOrgChartProps> {
    const showEditOther: boolean = this.props.displayMode === DisplayMode.Edit && this.props.viewType === TreeOrgChartType.ShowOtherTeam;
    let selectedTeamleader: string | undefined = undefined;
    if (showEditOther && this.props.teamLeader && this.props.teamLeader.length > 0) {
      selectedTeamleader = this.claimUserToUPN(this.props.teamLeader);
      if (!selectedTeamleader) {
        // Bug fix: this was `==` (a no-op comparison), so the people picker
        // default stayed empty whenever the stored value was already a
        // plain UPN rather than a claims string.
        selectedTeamleader = this.props.teamLeader;
      }
    }
    return (
      <div className={styles.treeOrgChart}>
        <WebPartTitle
          displayMode={this.props.displayMode}
          title={this.props.title}
          updateProperty={this.props.updateProperty}
        />
        {showEditOther && (<div>
          <PeoplePicker
            context={this.props.context}
            titleText={strings.TeamLeaderHeadline}
            personSelectionLimit={1}
            groupName={""} // Leave this blank in case you want to filter from all users
            isRequired={true}
            disabled={false}
            defaultSelectedUsers={selectedTeamleader ? [selectedTeamleader] : undefined}
            selectedItems={(items: any) => {
              if (this.props.updateTeamLeader) {
                if (items.length > 0) {
                  const teamleaderupn: string | undefined = this.claimUserToUPN(items[0].loginName);
                  if (teamleaderupn) {
                    this.props.updateTeamLeader(teamleaderupn);
                    return;
                  }
                }
                this.props.updateTeamLeader('');
              }
            }}
            showHiddenInUI={false}
            principalTypes={[PrincipalType.User]}
            resolveDelay={1000} />
        </div>)}
        {this.state.isLoading ? (
          <Spinner
            size={SpinnerSize.large}
            label="Loading Organization Chart ..."
          ></Spinner>
        ) : null}
        <div className={styles.treeContainer}>
          <SortableTree
            treeData={this.state.treeData}
            onChange={this.handleTreeOnChange.bind(this)}
            canDrag={false}
            rowHeight={70}
            maxDepth={this.props.maxLevels}
            generateNodeProps={rowInfo => {
              return !this.props.detailBehavoir ?
                ({
                  buttons: [
                    <IconButton
                      disabled={false}
                      checked={false}
                      iconProps={{ iconName: "ContactInfo" }}
                      title={strings.ContactInfoTitle}
                      ariaLabel={strings.ContactInfoTitle}
                      onClick={() => {
                        window.open(
                          `https://eur.delve.office.com/?p=${rowInfo.node.title.props.tertiaryText}&v=work`
                        );
                      }}
                    />
                  ]
                }) : undefined;
            }
            }
          />
        </div>
      </div>
    );
  }
}
import Logger from "@supercollider/logger";
import { ChildProcess, spawn } from "child_process";
import cuid from "cuid";
import { EventEmitter } from "events";
import fs from "fs";
import yaml from "js-yaml";
import _ from "lodash";
import path from "path";
import temp from "temp";
import untildify from "untildify";
import { SCError } from "./Errors";
import { SclangCompileResult, SclangIO, State } from "./internals/sclang-io";
import { resolveOptions, SCLangConf, SCLangOptions } from "./options";
/**
 * Result of an interpret/execute call, deserialized from sclang's reply.
 * TODO: type this better
 * but really it could be just about anything
 */
export type SclangResultType = any;
/** Args for constructor; any subset of SCLangOptions (defaults are filled in by resolveOptions) */
export type SCLangArgs = Partial<SCLangOptions>;
/**
 * This class manages a supercollider language interpreter process
 * and sends messages to and from it using STDIN / STDOUT.
 *
 * SuperCollider comes with an executable called sclang
 * which can be communicated with via stdin/stdout
 * or via OSC.
 *
 * @memberof of lang
 */
export default class SCLang extends EventEmitter {
  options: SCLangOptions;
  process?: ChildProcess;
  log: Logger;
  stateWatcher: SclangIO;

  /*
   * @param {object} options - sclang command line options
   */
  constructor(options?: SCLangArgs) {
    super();
    this.options = resolveOptions(options);
    this.log = new Logger(this.options.debug, this.options.echo, this.options.log);
    this.log.dbug(this.options);
    // The stateWatcher parses sclang's stdout and tracks interpreter state.
    this.stateWatcher = this.makeStateWatcher();
  }

  /**
   * command line args for sclang
   *
   * ```
   * -d <path>                      Set runtime directory
   * -D                             Enter daemon mode (no input)
   * -g <memory-growth>[km]         Set heap growth (default 256k)
   * -h                             Display this message and exit
   * -l <path>                      Set library configuration file
   * -m <memory-space>[km]          Set initial heap size (default 2m)
   * -r                             Call Main.run on startup
   * -s                             Call Main.stop on shutdown
   * -u <network-port-number>       Set UDP listening port (default 57120)
   * -i <ide-name>                  Specify IDE name (for enabling IDE-specific class code, default "none")
   * -a                             Standalone mode
   * ```
   */
  args(options: {
    /**
     * Port for lang to connect to scsynth from
     */
    langPort?: number;
    /**
     * Path to sclang conf file
     */
    conf?: string;
    /**
     * Path to .scd file to execute
     */
    executeFile?: string;
  }): string[] {
    const o: string[] = [];
    o.push("-i", "supercolliderjs");
    if (options.executeFile) {
      o.push(options.executeFile);
    }
    if (options.langPort) {
      o.push("-u", String(options.langPort));
    }
    if (options.conf) {
      o.push("-l", options.conf);
    }
    return o;
  }

  /**
   * makeSclangConfig
   *
   * make sclang_config.yaml as a temporary file
   * with the supplied values
   *
   * This is the config file that sclang reads, specifying
   * includePaths and excludePaths
   *
   * Resolves with path of written config file.
   */
  makeSclangConfig(conf: SCLangConf): Promise<string> {
    /**
      write options as yaml to a temp file
      and return the path
    **/
    const str = yaml.safeDump(conf, { indent: 4 });
    return new Promise((resolve, reject) => {
      temp.open("sclang-conf", function(err, info) {
        if (err) {
          return reject(err);
        }
        fs.write(info.fd, str, err2 => {
          if (err2) {
            reject(err2);
          } else {
            fs.close(info.fd, err3 => {
              if (err3) {
                reject(err3);
              } else {
                resolve(info.path);
              }
            });
          }
        });
      });
    });
  }

  /** True once the interpreter has booted and compiled its class library. */
  isReady(): boolean {
    return this.stateWatcher.state === State.READY;
  }

  /**
   * Start sclang executable as a subprocess.
   *
   * sclang will compile it's class library, and this may result in syntax
   * or compile errors. These errors are parsed and returned in a structured format.
   *
   * Resolves with:
   *
   * ```js
   * {dirs: [compiled directories]}
   * ```
   *
   * or rejects with:
   *
   * ```js
   * {
   *   dirs: [],
   *   compileErrors: [],
   *   parseErrors: [],
   *   duplicateClasses: [],
   *   errors[],
   *   extensionErrors: [],
   *   stdout: 'compiling class library...etc.'
   * }
   * ```
   */
  async boot(): Promise<SclangCompileResult> {
    this.setState(State.BOOTING);
    // merge supercollider.js options with any sclang_conf
    const conf = this.sclangConfigOptions(this.options);
    const confPath = await this.makeSclangConfig(conf);
    return this.spawnProcess(this.options.sclang, _.extend({}, this.options, { conf: confPath }));
  }

  /**
   * spawnProcess - starts the sclang executable
   *
   * sets this.process
   * adds state listeners
   *
   * @param {string} execPath - path to sclang
   * @param {object} commandLineOptions - options for the command line
   *                filtered with this.args so it will only include values
   *                that sclang uses.
   * @returns {Promise}
   *     resolves null on successful boot and compile
   *     rejects on failure to boot or failure to compile the class library
   */
  spawnProcess(execPath: string, commandLineOptions: object): Promise<SclangCompileResult> {
    return new Promise((resolve, reject) => {
      let done = false;
      this.process = this._spawnProcess(execPath, this.args(commandLineOptions));
      if (!(this.process && this.process.pid)) {
        // check if path exists
        const exists = fs.existsSync(execPath);
        reject(new Error(`Failed to spawn process! ${exists ? "" : " Executable not found."} path: ${execPath}`));
        return;
      }
      const bootListener = (state: State): void => {
        if (state === State.READY) {
          done = true;
          this.removeListener("state", bootListener);
          resolve(this.stateWatcher.result);
        } else if (state === State.COMPILE_ERROR) {
          done = true;
          reject(new SCError("CompileError", this.stateWatcher.result));
          this.removeListener("state", bootListener);
          // probably should remove all listeners
        }
      };
      // temporary listener until booted ready or compileError
      // that removes itself
      this.addListener("state", bootListener);
      setTimeout(() => {
        if (!done) {
          const err = `Timeout waiting for sclang to boot pid:${this.process && this.process.pid}`;
          this.log.err(err);
          // force it to finalize
          this.stateWatcher.processOutput();
          // bootListener above will reject the promise
          this.stateWatcher.setState(State.COMPILE_ERROR);
          this.removeListener("state", bootListener);
          reject(new Error(err));
        }
      }, 10000);
      // long term listeners
      if (this.process) {
        this.installListeners(this.process, Boolean(this.options.stdin));
      }
    });
  }

  // Thin wrapper around child_process.spawn (overridable in tests).
  _spawnProcess(execPath: string, commandLineOptions: string[]): ChildProcess {
    return spawn(execPath, commandLineOptions, {
      cwd: path.dirname(execPath),
    });
  }

  /**
   * sclangConfigOptions
   *
   * Builds the options that will be written to the conf file that is read by sclang
   * If supercolliderjs-conf specifies a sclang_conf path
   * then this is read and any includePaths and excludePaths are merged
   *
   * throws error if conf cannot be read
   */
  sclangConfigOptions(options: SCLangOptions): SCLangConf {
    // Always include the bundled supercollider-js class extensions.
    const runtimeIncludePaths = [path.resolve(__dirname, "./supercollider-js")];
    const defaultConf: SCLangConf = {
      postInlineWarnings: false,
      includePaths: [],
      excludePaths: [],
    };
    let conf = defaultConf;
    if (options.sclang_conf) {
      try {
        conf = yaml.safeLoad(fs.readFileSync(untildify(options.sclang_conf), "utf8"));
      } catch (e) {
        // By default allow a missing sclang_conf file
        // so that the language can create it on demand if you use Quarks or LanguageConfig.
        if (!options.failIfSclangConfIsMissing) {
          // Was the sclang_conf just in the defaults or was it explicitly set ?
          this.log.dbug(e);
          conf = defaultConf;
        } else {
          throw new Error("Cannot open or read specified sclang_conf " + options.sclang_conf);
        }
      }
    }
    return {
      includePaths: _.union<string>(conf.includePaths, options.conf.includePaths, runtimeIncludePaths),
      excludePaths: _.union<string>(conf.excludePaths, options.conf.excludePaths),
      postInlineWarnings: _.isUndefined(options.conf.postInlineWarnings)
        ? conf.postInlineWarnings
        : !!options.conf.postInlineWarnings,
    };
  }

  // Creates the stdout parser and re-emits its events from this instance.
  makeStateWatcher(): SclangIO {
    const stateWatcher = new SclangIO();
    for (const name of ["interpreterLoaded", "error", "stdout", "state"]) {
      stateWatcher.on(name, (...args) => {
        this.emit(name, ...args);
      });
    }
    return stateWatcher;
  }

  /**
   * listen to events from process and pipe stdio to the stateWatcher
   */
  installListeners(subprocess: ChildProcess, listenToStdin = false): void {
    if (listenToStdin) {
      // stdin of the global top level nodejs process
      process.stdin.setEncoding("utf8");
      process.stdin.on("data", chunk => {
        if (chunk) {
          this.write(chunk, true);
        }
      });
    }
    if (subprocess.stdout) {
      subprocess.stdout.on("data", data => {
        const ds = String(data);
        this.log.dbug(ds);
        this.stateWatcher.parse(ds);
      });
    }
    if (subprocess.stderr) {
      subprocess.stderr.on("data", data => {
        const error = String(data);
        this.log.stderr(error);
        this.emit("stderr", error);
      });
    }
    subprocess.on("error", err => {
      this.log.err("ERROR:" + err);
      this.emit("stderr", err);
    });
    subprocess.on("close", (code, signal) => {
      this.log.dbug("close " + code + signal);
      this.emit("exit", code);
      this.setState(State.NULL);
    });
    subprocess.on("exit", (code, signal) => {
      this.log.dbug("exit " + code + signal);
      this.emit("exit", code);
      this.setState(State.NULL);
    });
    subprocess.on("disconnect", () => {
      this.log.dbug("disconnect");
      this.emit("exit");
      this.setState(State.NULL);
    });
  }

  /**
   * write
   *
   * Send a raw string to sclang to be interpreted
   * callback is called after write is complete.
   */
  write(chunk: string, noEcho: boolean): void {
    if (!noEcho) {
      this.log.stdin(chunk);
    }
    this.log.dbug(chunk);
    if (this.process && this.process.stdin) {
      this.process.stdin.write(chunk, "UTF-8");
      // Send the escape character which is interpreted by sclang as:
      // "evaluate the currently accumulated command line as SC code"
      this.process.stdin.write("\x0c", "UTF-8", error => error && this.log.err(error));
    }
  }

  /**
   * storeSclangConf
   *
   * Store the original configuration path
   * so that it can be accessed by the modified Quarks methods
   * to store into the correct conf file.
   */
  async storeSclangConf(): Promise<SCLang> {
    if (this.options.sclang_conf) {
      const confPath = path.resolve(untildify(this.options.sclang_conf));
      const setConfigPath = 'SuperColliderJS.sclangConf = "' + confPath + '";\n\n';
      await this.interpret(setConfigPath, undefined, true, true, true);
    }
    return this;
  }

  /**
   * Interprets code in sclang and returns a Promise.
   *
   * @param {String} code
   *        source code to evaluate
   * @param {String} nowExecutingPath
            set thisProcess.nowExecutingPath
   *        for use in a REPL to evaluate text in a file
   *        and let sclang know what file it is executing.
   * @param {Boolean} asString
   *        return result .asString for post window
   *        otherwise returns result as a JSON object
   * @param {Boolean} postErrors
   *        call error.reportError on any errors
   *        which posts call stack, receiver, args, etc
   * @param {Boolean} getBacktrace
   *        return full backtrace
   * @returns {Promise} results - which resolves with result as JSON or rejects with SCLangError.
   */
  interpret(
    code: string,
    nowExecutingPath?: string,
    asString = false,
    postErrors = true,
    getBacktrace = true,
  ): Promise<SclangResultType> {
    return new Promise((resolve, reject): void => {
      // Escape the code so it survives being embedded in a one-line
      // double-quoted SC string; the SC side reverses these markers.
      const escaped = code
        .replace(/[\n\r]/g, "__NL__")
        .replace(/\\/g, "__SLASH__")
        .replace(/"/g, '\\"');
      const guid = cuid();
      const args = [
        '"' + guid + '"',
        '"' + escaped + '"',
        nowExecutingPath ? '"' + nowExecutingPath + '"' : "nil",
        asString ? "true" : "false",
        postErrors ? "true" : "false",
        getBacktrace ? "true" : "false",
      ].join(",");
      this.stateWatcher.registerCall(guid, { resolve, reject });
      this.write("SuperColliderJS.interpret(" + args + ");", true);
    });
  }

  /**
   * executeFile
   *
   * Execute the contents of a .scd file in the interpreter.
   * Resolves/rejects with the interpreter result, like `interpret`.
   */
  executeFile(filename: string): Promise<any> {
    return new Promise((resolve, reject): void => {
      const guid = cuid();
      this.stateWatcher.registerCall(guid, { resolve, reject });
      // Bug fix: the filename argument was dropped (a literal placeholder
      // string was sent instead), so sclang never received the path of the
      // file to execute. Interpolate the actual filename.
      this.write(`SuperColliderJS.executeFile("${guid}", "${filename}")`, true);
    });
  }

  private setState(state: State): void {
    this.stateWatcher.setState(state);
  }

  // Directories whose class files were compiled on boot.
  compilePaths(): string[] {
    return this.stateWatcher.result.dirs;
  }

  quit(): Promise<SCLang> {
    return new Promise((resolve): void => {
      const cleanup = (): void => {
        this.process = undefined;
        this.setState(State.NULL);
        resolve(this);
      };
      if (this.process) {
        this.process.once("exit", cleanup);
        // request a polite shutdown
        this.process.kill("SIGINT");
        setTimeout((): void => {
          // 3.6.6 doesn't fully respond to SIGINT
          // but SIGTERM causes it to crash
          if (this.process) {
            this.process.kill("SIGTERM");
            cleanup();
          }
        }, 250);
      } else {
        cleanup();
      }
    });
  }

  /**
   * @deprecated
   *
   * @static
   * @memberof SCLang
   */
  static boot = boot;
}
/**
* Boots an sclang interpreter, resolving options and connecting.
*
* @memberof lang
*
* For values not supplied in options, it will load for config files in:
* - .supercollider.yaml
* - ~/.supercollider.yaml
*/
export async function boot(options?: SCLangArgs): Promise<SCLang> {
const sclang = new SCLang(options);
await sclang.boot();
await sclang.storeSclangConf();
return sclang;
} | the_stack |
import DataColumn, {DataColumnOptions, ShaderAttributeOptions, BufferAccessor} from './data-column';
import {IShaderAttribute} from './shader-attribute';
import assert from '../../utils/assert';
import {createIterable, getAccessorFromBuffer} from '../../utils/iterable-utils';
import {fillArray} from '../../utils/flatten';
import * as range from '../../utils/range';
import {normalizeTransitionSettings, TransitionSettings} from './attribute-transition-utils';
import type {Buffer} from '@luma.gl/webgl';
import type {NumericArray, TypedArray} from '../../types/types';
/** Per-object accessor: maps one data object (plus iteration context) to a value. */
export type Accessor<DataType, ReturnType> = (
  object: DataType,
  context: {
    data: any;
    index: number;
    target: number[];
  }
) => ReturnType;

/** Custom updater invoked to fill an attribute's value array for a row range. */
export type Updater = (
  attribute: Attribute,
  {
    data,
    startRow,
    endRow,
    props,
    numInstances
  }: {
    data: any;
    startRow: number;
    endRow: number;
    props: any;
    numInstances: number;
  }
) => void;

/** DataColumn options extended with deck.gl-level update/transition settings. */
export type AttributeOptions = DataColumnOptions<{
  transition?: boolean;
  noAlloc?: boolean;
  update?: Updater;
  accessor?: Accessor<any, any> | string | string[];
  transform?: (value: any) => any;
  shaderAttributes?: Record<string, Partial<ShaderAttributeOptions>>;
}>;

/** Pre-packed binary input: a typed array or GPU buffer plus optional accessor layout. */
export type BinaryAttribute = Partial<BufferAccessor> & {value?: TypedArray; buffer?: Buffer};

type AttributeInternalState = {
  startIndices: NumericArray | null;
  /** Legacy: external binary supplied via attribute name */
  lastExternalBuffer: TypedArray | Buffer | BinaryAttribute | null;
  /** External binary supplied via accessor name */
  binaryValue: TypedArray | Buffer | BinaryAttribute | null;
  binaryAccessor: Accessor<any, any> | null;
  // `needsUpdate`/`needsRedraw` hold `false` or a string describing the reason.
  needsUpdate: string | boolean;
  needsRedraw: string | boolean;
  updateRanges: number[][];
};
/**
 * A layer attribute: a DataColumn (CPU value array + GPU buffer) augmented
 * with deck.gl-level bookkeeping — dirty flags with reasons, partial update
 * ranges, external/binary buffer ingestion, transitions, and an automatic
 * updater that fills the value array by mapping an accessor over the data.
 */
export default class Attribute extends DataColumn<AttributeOptions, AttributeInternalState> {
  /** Legacy approach to set attribute value - read `isConstant` instead for attribute state */
  constant: boolean = false;

  constructor(gl: WebGLRenderingContext, opts: AttributeOptions) {
    super(gl, opts, {
      startIndices: null,
      lastExternalBuffer: null,
      binaryValue: null,
      binaryAccessor: null,
      needsUpdate: true,
      needsRedraw: false,
      updateRanges: range.FULL
    });
    // If no explicit updater is supplied, derive one from the accessor.
    // eslint-disable-next-line
    this.settings.update = opts.update || (opts.accessor ? this._autoUpdater : undefined);
    // Seal so later typos on settings/state fail loudly instead of silently.
    Object.seal(this.settings);
    Object.seal(this.state);
    // Check all fields and generate helpful error messages
    this._validateAttributeUpdaters();
  }

  /** Row -> first vertex index mapping for variable-size geometries (or null). */
  get startIndices(): NumericArray | null {
    return this.state.startIndices;
  }

  set startIndices(layout: NumericArray | null) {
    this.state.startIndices = layout;
  }

  /** Returns `false`, or a string describing why an update is needed. */
  needsUpdate(): string | boolean {
    return this.state.needsUpdate;
  }

  /**
   * Returns `false`, or the reason a redraw is needed.
   * Pass `clearChangedFlags: true` to also reset the flag.
   */
  needsRedraw({clearChangedFlags = false}: {clearChangedFlags?: boolean} = {}): string | boolean {
    const needsRedraw = this.state.needsRedraw;
    this.state.needsRedraw = needsRedraw && !clearChangedFlags;
    return needsRedraw;
  }

  /** Keys that should invalidate this attribute: its id plus any string accessor name(s). */
  getUpdateTriggers(): string[] {
    const {accessor} = this.settings;
    // Backwards compatibility: allow attribute name to be used as update trigger key
    return [this.id].concat((typeof accessor !== 'function' && accessor) || []);
  }

  supportsTransition(): boolean {
    return Boolean(this.settings.transition);
  }

  // Resolve transition settings object if transition is enabled, otherwise `null`
  getTransitionSetting(opts: Record<string, any>): TransitionSettings | null {
    if (!opts || !this.supportsTransition()) {
      return null;
    }
    const {accessor} = this.settings;
    // TODO: have the layer resolve these transition settings itself?
    const layerSettings = this.settings.transition;
    // these are the transition settings passed in by the user
    const userSettings = Array.isArray(accessor)
      ? // @ts-ignore
        opts[accessor.find(a => opts[a])]
      : // @ts-ignore
        opts[accessor];
    // Shorthand: use duration instead of parameter object
    return normalizeTransitionSettings(userSettings, layerSettings);
  }

  /**
   * Mark the attribute dirty. If `dataRange` is given, only that row range is
   * added to the pending update ranges; otherwise the full range is marked.
   */
  setNeedsUpdate(reason: string = this.id, dataRange?: {startRow?: number; endRow?: number}): void {
    this.state.needsUpdate = this.state.needsUpdate || reason;
    this.setNeedsRedraw(reason);
    if (dataRange) {
      const {startRow = 0, endRow = Infinity} = dataRange;
      this.state.updateRanges = range.add(this.state.updateRanges, [startRow, endRow]);
    } else {
      this.state.updateRanges = range.FULL;
    }
  }

  clearNeedsUpdate(): void {
    this.state.needsUpdate = false;
    this.state.updateRanges = range.EMPTY;
  }

  setNeedsRedraw(reason: string = this.id): void {
    this.state.needsRedraw = this.state.needsRedraw || reason;
  }

  /**
   * Ensure backing storage for `numInstances`. Returns true if this attribute
   * manages its own storage (i.e. it has an updater and is not `noAlloc`).
   */
  allocate(numInstances: number): boolean {
    const {state, settings} = this;
    if (settings.noAlloc) {
      // Data is provided through a Buffer object.
      return false;
    }
    if (settings.update) {
      // Copy existing data over only when a partial update is pending.
      super.allocate(numInstances, state.updateRanges !== range.FULL);
      return true;
    }
    return false;
  }

  /**
   * Run the updater over all pending update ranges and upload the result.
   * Clears the dirty flag and marks a redraw. Returns true if an updater ran.
   */
  updateBuffer({
    numInstances,
    data,
    props,
    context
  }: {
    numInstances: number;
    data: any;
    props: any;
    context: any;
  }): boolean {
    if (!this.needsUpdate()) {
      return false;
    }
    const {
      state: {updateRanges},
      settings: {update, noAlloc}
    } = this;
    let updated = true;
    if (update) {
      // Custom updater - typically for non-instanced layers
      for (const [startRow, endRow] of updateRanges) {
        update.call(context, this, {data, startRow, endRow, props, numInstances});
      }
      if (!this.value) {
        // no value was assigned during update
      } else if (
        this.constant ||
        this.buffer.byteLength < (this.value as TypedArray).byteLength + this.byteOffset
      ) {
        // Constant value, or buffer too small for a sub-upload: full re-set.
        this.setData({
          value: this.value,
          constant: this.constant
        });
        // Setting attribute.constant in updater is a legacy approach that interferes with allocation in the next cycle
        // Respect it here but reset after use
        this.constant = false;
      } else {
        // Upload only the vertex ranges that were touched.
        for (const [startRow, endRow] of updateRanges) {
          const startOffset = Number.isFinite(startRow) ? this.getVertexOffset(startRow) : 0;
          const endOffset = Number.isFinite(endRow)
            ? this.getVertexOffset(endRow)
            : noAlloc || !Number.isFinite(numInstances)
            ? this.value.length
            : numInstances * this.size;
          super.updateSubBuffer({startOffset, endOffset});
        }
      }
      this._checkAttributeArray();
    } else {
      updated = false;
    }
    this.clearNeedsUpdate();
    this.setNeedsRedraw();
    return updated;
  }

  // Use generic value
  // Returns true if successful
  setConstantValue(value?: NumericArray): boolean {
    if (value === undefined || typeof value === 'function') {
      // ignore if this attribute has no accessor
      // ignore if accessor is function, will be used in updater
      return false;
    }
    const hasChanged = this.setData({constant: true, value});
    if (hasChanged) {
      this.setNeedsRedraw();
    }
    this.clearNeedsUpdate();
    return true;
  }

  // Use external buffer
  // Returns true if successful
  // eslint-disable-next-line max-statements
  setExternalBuffer(buffer?: TypedArray | Buffer | BinaryAttribute): boolean {
    const {state} = this;
    if (!buffer) {
      state.lastExternalBuffer = null;
      return false;
    }
    this.clearNeedsUpdate();
    if (state.lastExternalBuffer === buffer) {
      // Same object as last time: nothing to upload.
      return true;
    }
    state.lastExternalBuffer = buffer;
    this.setNeedsRedraw();
    this.setData(buffer);
    return true;
  }

  // Binary value is a typed array packed from mapping the source data with the accessor
  // If the returned value from the accessor is the same as the attribute value, set it directly
  // Otherwise use the auto updater for transform/normalization
  setBinaryValue(
    buffer?: TypedArray | Buffer | BinaryAttribute,
    startIndices: NumericArray | null = null
  ): boolean {
    const {state, settings} = this;
    if (!buffer) {
      state.binaryValue = null;
      state.binaryAccessor = null;
      return false;
    }
    if (settings.noAlloc) {
      // Let the layer handle this
      return false;
    }
    if (state.binaryValue === buffer) {
      this.clearNeedsUpdate();
      return true;
    }
    state.binaryValue = buffer;
    this.setNeedsRedraw();
    const needsUpdate = settings.transform || startIndices !== this.startIndices;
    if (needsUpdate) {
      // Cannot use the buffer directly: wrap it in an element accessor and
      // let `_autoUpdater` run transform/normalization over it.
      if (ArrayBuffer.isView(buffer)) {
        buffer = {value: buffer};
      }
      const binaryValue = buffer as BinaryAttribute;
      assert(ArrayBuffer.isView(binaryValue.value), `invalid ${settings.accessor}`);
      const needsNormalize = binaryValue.size && binaryValue.size !== this.size;
      state.binaryAccessor = getAccessorFromBuffer(binaryValue.value, {
        size: binaryValue.size || this.size,
        stride: binaryValue.stride,
        offset: binaryValue.offset,
        startIndices: startIndices as NumericArray,
        nested: needsNormalize
      });
      // Fall through to auto updater
      return false;
    }
    this.clearNeedsUpdate();
    this.setData(buffer);
    return true;
  }

  /** First element index (not byte offset) of `row` in the value array. */
  getVertexOffset(row: number): number {
    const {startIndices} = this;
    const vertexIndex = startIndices ? startIndices[row] : row;
    return vertexIndex * this.size;
  }

  /** Resolve the shader attribute map, defaulting to this attribute's own id. */
  getShaderAttributes(): Record<string, IShaderAttribute> {
    const shaderAttributeDefs = this.settings.shaderAttributes || {[this.id]: null};
    const shaderAttributes: Record<string, IShaderAttribute> = {};
    for (const shaderAttributeName in shaderAttributeDefs) {
      Object.assign(
        shaderAttributes,
        super.getShaderAttributes(shaderAttributeName, shaderAttributeDefs[shaderAttributeName])
      );
    }
    return shaderAttributes;
  }

  /* eslint-disable max-depth, max-statements */
  /**
   * Default updater: iterates `data[startRow..endRow)`, calls the accessor
   * (or binary accessor) per object, optionally transforms the result, and
   * writes it into the value array — fanned out over each object's vertices
   * when `startIndices` describes variable-size geometries.
   */
  private _autoUpdater(
    attribute: Attribute,
    {
      data,
      startRow,
      endRow,
      props,
      numInstances
    }: {
      data: any;
      startRow: number;
      endRow: number;
      props: any;
      numInstances: number;
    }
  ): void {
    if (attribute.constant) {
      return;
    }
    const {settings, state, value, size, startIndices} = attribute;
    const {accessor, transform} = settings;
    const accessorFunc: Accessor<any, any> =
      state.binaryAccessor ||
      // @ts-ignore
      (typeof accessor === 'function' ? accessor : props[accessor]);
    assert(typeof accessorFunc === 'function', `accessor "${accessor}" is not a function`);
    let i = attribute.getVertexOffset(startRow);
    const {iterable, objectInfo} = createIterable(data, startRow, endRow);
    for (const object of iterable) {
      objectInfo.index++;
      let objectValue = accessorFunc(object, objectInfo);
      if (transform) {
        // transform callbacks could be bound to a particular layer instance.
        // always point `this` to the current layer.
        objectValue = transform.call(this, objectValue);
      }
      if (startIndices) {
        const numVertices =
          (objectInfo.index < startIndices.length - 1
            ? startIndices[objectInfo.index + 1]
            : numInstances) - startIndices[objectInfo.index];
        if (objectValue && Array.isArray(objectValue[0])) {
          // One sub-array per vertex: copy each in turn.
          let startIndex = i;
          for (const item of objectValue) {
            attribute._normalizeValue(item, value as TypedArray, startIndex);
            startIndex += size;
          }
        } else if (objectValue && objectValue.length > size) {
          // Flat array already covering all vertices: copy as a block.
          (value as TypedArray).set(objectValue, i);
        } else {
          // Single value: normalize once, then replicate across the vertices.
          attribute._normalizeValue(objectValue, objectInfo.target, 0);
          fillArray({
            target: value,
            source: objectInfo.target,
            start: i,
            count: numVertices
          });
        }
        i += numVertices * size;
      } else {
        attribute._normalizeValue(objectValue, value as TypedArray, i);
        i += size;
      }
    }
  }
  /* eslint-enable max-depth, max-statements */

  // Validate deck.gl level fields
  private _validateAttributeUpdaters() {
    const {settings} = this;
    // Check that 'update' is a valid function
    const hasUpdater = settings.noAlloc || typeof settings.update === 'function';
    if (!hasUpdater) {
      throw new Error(`Attribute ${this.id} missing update or accessor`);
    }
  }

  // check that the first few elements of the attribute are reasonable
  /* eslint-disable no-fallthrough */
  private _checkAttributeArray() {
    const {value} = this;
    const limit = Math.min(4, this.size);
    if (value && value.length >= limit) {
      let valid = true;
      // Deliberate fallthrough: checks components limit-1 down to 0.
      switch (limit) {
        case 4:
          valid = valid && Number.isFinite(value[3]);
        case 3:
          valid = valid && Number.isFinite(value[2]);
        case 2:
          valid = valid && Number.isFinite(value[1]);
        case 1:
          valid = valid && Number.isFinite(value[0]);
          break;
        default:
          valid = false;
      }
      if (!valid) {
        throw new Error(`Illegal attribute generated for ${this.id}`);
      }
    }
  }
  /* eslint-enable no-fallthrough */
}
import * as t from 'babel-types';
import * as assert from 'assert';
import * as bh from '@stopify/util';
import { generic } from '@stopify/normalize-js';
import { fresh } from '@stopify/hygiene';
import * as imm from 'immutable';
import * as capture from './captureLogics';
import { CompilerOpts } from '../types';
import { box } from './boxAssignables';
import { getLabels, AppType } from './label';
import { NodePath } from 'babel-traverse';
import * as h from '../helpers';
import {
isNormalMode,
captureExn,
endTurnExn,
captureLocals,
target,
restoreNextFrame,
stackFrameCall,
runtime,
runtimeStack,
types,
} from './captureLogics';
// Shorthand for building `let`-style variable declarations.
let letExpression = bh.letExpression;
export { restoreNextFrame };

// AST identifiers shared by all the code this transform emits.
const frame = t.identifier('$frame');
const newTarget = t.identifier('newTarget');
const captureFrameId = t.identifier('frame');
const matArgs = t.identifier('materializedArguments');
const restoreExn = t.memberExpression(types, t.identifier('Restore'));
const isRestoringMode = t.unaryExpression('!', isNormalMode);
const popRuntimeStack = t.callExpression(t.memberExpression(runtimeStack,
  t.identifier('pop')), []);
const argsLen = t.identifier('argsLen');
// Statements that adjust the runtime's remaining-stack counter on
// function entry (--) and exit (++).
const increaseStackSize = t.expressionStatement(t.updateExpression(
  '++', t.memberExpression(runtime, t.identifier('remainingStack'))));
const decreaseStackSize = t.expressionStatement(t.updateExpression(
  '--', t.memberExpression(runtime, t.identifier('remainingStack'))));

// A function node annotated by an earlier pass with its local variables.
type FunctionT = (t.FunctionExpression | t.FunctionDeclaration) & {
  localVars: t.Identifier[]
};

// Nodes carrying optional annotations from earlier passes: jump labels,
// application type, and whether the function references `arguments`.
type Labeled<T> = T & {
  labels?: number[];
  appType?: AppType;
  __usesArgs__?: boolean
};

// Signature shared by all capture strategies (lazy/eager/retval/...).
type CaptureFun = (path: NodePath<t.AssignmentExpression>,
  opts: CompilerOpts) => void;

interface State {
  opts: CompilerOpts
}
/**
 * Instrument a non-flat function for stack capture/restore (used by the
 * lazy/eager/retval/fudge strategies). Prepends to the function body:
 * optional `argsLen`/materialized-arguments setup, the stack-size decrement,
 * a restore block run in restoring mode, a local `captureLocals` closure
 * that saves locals into a frame, and a `restoreNextFrame` closure that
 * re-invokes the function to rebuild the next frame.
 */
function func(path: NodePath<Labeled<FunctionT>>, state: State): void {
  const jsArgs = state.opts.jsArgs;
  if ((<any>path.node).mark === 'Flat') {
    return;
  }
  const restoreLocals = path.node.localVars;

  // We instrument every non-flat function to begin with a *restore block*
  // that is able to re-construct a saved stack frame. When the function is
  // invoked in restore mode, its formal arguments are already restored.
  // The restore block must restore the local variables and deal with
  // the *arguments* object. The arguments object is a real pain and hurts
  // performance. So, we avoid restoring it faithfully unless we are explicitly
  // configured to do so.
  const restoreBlock = [
    t.expressionStatement(t.assignmentExpression('=', frame, popRuntimeStack)),
    t.expressionStatement(t.assignmentExpression('=', target,
      t.memberExpression(frame, t.identifier('index')))),
  ];
  if (restoreLocals.length > 0) {
    // Restore all local variables. Creates the expression:
    //   [local0, local1, ... ] = topStack.locals;
    restoreBlock.push(t.expressionStatement(t.assignmentExpression('=',
      t.arrayPattern(restoreLocals), t.memberExpression(frame,
        t.identifier('locals')))));
  }
  if (path.node.__usesArgs__ && state.opts.jsArgs === 'full') {
    // To fully support the arguments object, we need to ensure that the
    // formal parameters alias the arguments array. This restores the
    // aliases using:
    //
    //   [param0, param1, ...] = topStack.formals
    restoreBlock.push(
      t.expressionStatement(t.assignmentExpression('=',
        t.arrayPattern((<any>path.node.params)),
        t.memberExpression(frame, t.identifier('formals')))));
    restoreBlock.push(
      t.expressionStatement(t.assignmentExpression('=',
        argsLen, t.memberExpression(frame, argsLen))));
    restoreBlock.push(
      t.expressionStatement(t.assignmentExpression('=',
        matArgs, t.logicalExpression('||',
          t.memberExpression(frame, t.identifier('params')),
          matArgs))));
  }
  const ifRestoring = t.ifStatement(isRestoringMode,
    t.blockStatement(restoreBlock));

  // The body of a local function that saves the the current stack frame.
  const captureBody: t.Statement[] = [ ];
  // Save all local variables as an array in frame.locals.
  if (restoreLocals.length > 0) {
    captureBody.push(
      t.expressionStatement(t.assignmentExpression('=',
        t.memberExpression(captureFrameId, t.identifier('locals')),
        t.arrayExpression(restoreLocals))));
  }
  // To support 'full' arguments ...
  if (path.node.__usesArgs__ && state.opts.jsArgs === 'full') {
    // ... save a copy of the parameters in the stack frame and
    captureBody.push(t.expressionStatement(t.assignmentExpression('=',
      t.memberExpression(captureFrameId, t.identifier('formals')),
      t.arrayExpression((<any>path.node.params)))));
    // ... save the length of the arguments array in the stack frame
    captureBody.push(t.expressionStatement(t.assignmentExpression('=',
      t.memberExpression(captureFrameId, argsLen),
      argsLen)));
  }
  const captureClosure = t.functionDeclaration(captureLocals,
    [captureFrameId], t.blockStatement(captureBody));

  // A local function to restore the next stack frame: re-invokes this
  // function with the same `this` and arguments.
  const reenterExpr = path.node.__usesArgs__
    ? t.callExpression(t.memberExpression(path.node.id, t.identifier('apply')),
        [t.thisExpression(), matArgs])
    : t.callExpression(t.memberExpression(path.node.id, t.identifier('call')),
        [t.thisExpression(), ...<any>path.node.params.map(paramToArg)]);
  const reenterClosure = t.variableDeclaration('var', [
    t.variableDeclarator(restoreNextFrame, t.arrowFunctionExpression([],
      t.blockStatement(path.node.__usesArgs__ ?
        [t.expressionStatement(t.assignmentExpression('=',
          t.memberExpression(matArgs, t.identifier('length')),
          t.memberExpression(
            t.callExpression(t.memberExpression(t.identifier('Object'),
              t.identifier('keys')), [matArgs]), t.identifier('length')))),
          t.returnStatement(reenterExpr)] :
        [t.returnStatement(reenterExpr)])))]);

  // Materialize `arguments` into a local array when the function uses it.
  const mayMatArgs: t.Statement[] = [];
  if (path.node.__usesArgs__) {
    const argExpr = jsArgs === 'faithful' || jsArgs === 'full'
      ? bh.arrayPrototypeSliceCall(t.identifier('arguments'))
      : t.identifier('arguments');
    mayMatArgs.push(
      t.variableDeclaration('let',
        [t.variableDeclarator(matArgs, argExpr)]));
    const boxedArgs = <imm.Set<string>>(<any>path.node).boxedArgs;
    if (jsArgs === 'faithful' || jsArgs === 'full') {
      // Boxed parameters must be re-boxed in the materialized array.
      const initMatArgs: t.Statement[] = [];
      (<t.Identifier[]>path.node.params).forEach((x, i) => {
        if (boxedArgs.contains(x.name)) {
          const cons = t.assignmentExpression('=',
            t.memberExpression(matArgs, t.numericLiteral(i), true),
            box(t.identifier(x.name)));
          initMatArgs.push(t.expressionStatement(cons));
        }
      });
      mayMatArgs.push(bh.sIf(isNormalMode, t.blockStatement(initMatArgs)));
    }
  }
  const defineArgsLen = letExpression(argsLen,
    t.memberExpression(t.identifier('arguments'), t.identifier('length')));

  // Splice the instrumentation onto the front of the function body.
  path.node.body.body.unshift(...[
    ...(state.opts.jsArgs === 'full' ? [defineArgsLen] : []),
    ...mayMatArgs,
    decreaseStackSize,
    ifRestoring,
    captureClosure,
    reenterClosure,
  ]);
  path.skip();
}
/**
 * Instrument a non-flat function for the 'catch' capture strategy: the
 * whole body is wrapped in try/catch, and when a Capture exception unwinds
 * through, a frame object (kind/f/index/locals/params/this, plus formals
 * and argsLen under full jsArgs) is pushed onto the exception's stack.
 */
function catchFunc(path: NodePath<Labeled<FunctionT>>, state: State): void {
  const jsArgs = state.opts.jsArgs;
  if ((<any>path.node).mark === 'Flat') {
    return;
  }
  const restoreLocals = path.node.localVars;
  const exn = fresh('exn');
  const exnStack = t.memberExpression(exn, t.identifier('stack'));
  const params = path.node.__usesArgs__
    ? matArgs : t.arrayExpression(path.node.params.map(paramToArg));
  // Properties of the frame object pushed when a capture unwinds here.
  const captureObject: t.ObjectProperty[] = [
    t.objectProperty(t.identifier('kind'), t.stringLiteral('rest')),
    t.objectProperty(t.identifier('f'), path.node.id),
    t.objectProperty(t.identifier('index'), target),
    t.objectProperty(t.identifier('locals'), t.arrayExpression(restoreLocals)),
    t.objectProperty(t.identifier('params'), params),
    t.objectProperty(t.identifier('this'), t.thisExpression()),
  ];
  if (path.node.__usesArgs__ && jsArgs === 'full') {
    // ... save a copy of the parameters in the stack frame and
    captureObject.push(t.objectProperty(t.identifier('formals'),
      t.arrayExpression((<any>path.node.params))));
    // ... save the length of the arguments array in the stack frame
    captureObject.push(t.objectProperty(argsLen, argsLen));
  }
  // NOTE: shadows the module-level `frame`; here the emitted `$frame` is
  // declared as a const inside the restore block.
  const frame = t.identifier('$frame');

  // We instrument every non-flat function to begin with a *restore block*
  // that is able to re-construct a saved stack frame. When the function is
  // invoked in restore mode, its formal arguments are already restored.
  // The restore block must restore the local variables and deal with
  // the *arguments* object. The arguments object is a real pain and hurts
  // performance. So, we avoid restoring it faithfully unless we are explicitly
  // configured to do so.
  const restoreBlock = [
    t.variableDeclaration('const', [t.variableDeclarator(frame, popRuntimeStack)]),
    t.expressionStatement(t.assignmentExpression('=', target,
      t.memberExpression(frame, t.identifier('index')))),
  ];
  if (restoreLocals.length > 0) {
    // Restore all local variables. Creates the expression:
    //   [local0, local1, ... ] = topStack.locals;
    restoreBlock.push(t.expressionStatement(t.assignmentExpression('=',
      t.arrayPattern(restoreLocals), t.memberExpression(frame,
        t.identifier('locals')))));
  }
  if (path.node.__usesArgs__ && jsArgs === 'full') {
    // To fully support the arguments object, we need to ensure that the
    // formal parameters alias the arguments array. This restores the
    // aliases using:
    //
    //   [param0, param1, ...] = topStack.formals
    restoreBlock.push(
      t.expressionStatement(t.assignmentExpression('=',
        t.arrayPattern((<any>path.node.params)),
        t.memberExpression(frame, t.identifier('formals')))));
    restoreBlock.push(
      t.expressionStatement(t.assignmentExpression('=',
        argsLen, t.memberExpression(frame, argsLen))));
    restoreBlock.push(
      t.expressionStatement(t.assignmentExpression('=',
        matArgs, t.logicalExpression('||',
          t.memberExpression(frame, t.identifier('params')),
          matArgs))));
  }
  const ifRestoring = t.ifStatement(isRestoringMode,
    t.blockStatement(restoreBlock));

  // Materialize `arguments` into a local array when the function uses it.
  const mayMatArgs: t.Statement[] = [];
  if (path.node.__usesArgs__) {
    const argExpr = jsArgs === 'faithful' || jsArgs === 'full'
      ? bh.arrayPrototypeSliceCall(t.identifier('arguments'))
      : t.identifier('arguments');
    mayMatArgs.push(
      t.variableDeclaration('let',
        [t.variableDeclarator(matArgs, argExpr)]));
    const boxedArgs = <imm.Set<string>>(<any>path.node).boxedArgs;
    if (jsArgs === 'faithful' || jsArgs === 'full') {
      // Boxed parameters must be re-boxed in the materialized array.
      const initMatArgs: t.Statement[] = [];
      (<t.Identifier[]>path.node.params).forEach((x, i) => {
        if (boxedArgs.contains(x.name)) {
          const cons = t.assignmentExpression('=',
            t.memberExpression(matArgs, t.numericLiteral(i), true),
            box(t.identifier(x.name)));
          initMatArgs.push(t.expressionStatement(cons));
        }
      });
      mayMatArgs.push(bh.sIf(isNormalMode, t.blockStatement(initMatArgs)));
    }
  }
  const defineArgsLen = letExpression(argsLen,
    t.memberExpression(t.identifier('arguments'), t.identifier('length')));

  // try { <original body> } catch (exn) {
  //   if (exn instanceof Capture) exn.stack.push({ ...frame... });
  //   throw exn;
  // }
  const wrapBody = t.tryStatement(path.node.body,
    t.catchClause(exn, t.blockStatement([
      t.ifStatement(t.binaryExpression('instanceof', exn, captureExn),
        t.blockStatement([
          t.expressionStatement(t.callExpression(t.memberExpression(exnStack, t.identifier('push')), [
            t.objectExpression(captureObject),
          ])),
        ])),
      t.throwStatement(exn)
    ])));
  path.node.body = t.blockStatement([
    ...(jsArgs === 'full' ? [defineArgsLen] : []),
    ...mayMatArgs,
    decreaseStackSize,
    ifRestoring,
    wrapBody,
  ]);
}
// Capture strategy dispatch: how application sites are transformed.
const captureLogics: { [key: string]: CaptureFun } = {
  lazy: capture.lazyCaptureLogic,
  catch: capture.lazyGlobalCatch,
  eager: capture.eagerCaptureLogic,
  retval: capture.retvalCaptureLogic,
  fudge: capture.fudgeCaptureLogic,
};

// Function-body instrumentation per strategy: only 'catch' wraps the body
// in try/catch; the others share the prologue-based `func`.
const instrumentLogics: {
  [key: string]: (path: NodePath<Labeled<FunctionT>>, state: State) => void
} = {
  catch: catchFunc,
  lazy: func,
  eager: func,
  retval: func,
  fudge: func,
};
/** True when the nearest enclosing function was marked 'Flat' (no capture possible). */
function isFlat(path: NodePath<t.Node>): boolean {
  return (<any>path.getFunctionParent().node).mark === 'Flat';
}
/**
 * Does this function reference `arguments` in a way that requires
 * materialization? Bare `arguments.length` is exempt; nested functions are
 * skipped because they have their own `arguments`.
 */
function usesArguments(path: NodePath<t.Function>) {
  let r = false;
  const visitor = {
    ReferencedIdentifier(path: NodePath<t.Identifier>) {
      if (path.node.name === 'arguments') {
        const parent = path.parent;
        if (t.isMemberExpression(parent) &&
            parent.object === path.node &&
            parent.computed === false &&
            t.isIdentifier(parent.property) &&
            parent.property.name === 'length') {
          // arguments.length is harmless and does not require materialization
        }
        else {
          r = true;
          path.stop();
        }
      }
    },
    Function(path: NodePath<t.Function>) {
      // Inner functions see their own `arguments` — don't descend.
      path.skip();
    }
  };
  path.traverse(visitor);
  return r;
}
/**
 * Convert a formal parameter into the argument expression used when
 * re-invoking the function: identifiers pass through unchanged, rest
 * elements over identifiers become spread elements. Any other parameter
 * form (patterns, defaults) is unsupported and throws.
 */
function paramToArg(node: t.LVal) {
  switch (node.type) {
    case 'Identifier':
      return node;
    case 'RestElement':
      if (node.argument.type === 'Identifier') {
        return t.spreadElement(node.argument);
      }
      break;
  }
  throw new Error(`paramToArg: expected Identifier or RestElement, received ${node.type}`);
}
/** Build a call expression `runtime.<method>(...args)`. */
function runtimeInvoke(method: string,
  ...args: t.Expression[]): t.CallExpression {
  return t.callExpression(
    t.memberExpression(runtime, t.identifier(method)), args);
}
/**
 * Build the runtime test for "the restore target is one of these labels":
 *   target === lN || ... || target === l0 || false
 */
function labelsIncludeTarget(labels: number[]): t.Expression {
  let test: t.Expression = bh.eFalse;
  for (const lbl of labels) {
    test = bh.or(t.binaryExpression('===', target, t.numericLiteral(lbl)), test);
  }
  return test;
}
/** True for an `if (isNormalMode) ...` statement with no else branch. */
function isNormalGuarded(stmt: t.Statement): stmt is t.IfStatement {
  return (t.isIfStatement(stmt) &&
    stmt.test === isNormalMode &&
    stmt.alternate === null);
}
/**
 * The main visitor: rewrites statements so that, in restoring mode, control
 * can "jump" back to the saved label (`target`), while normal-mode execution
 * is guarded by `isNormalMode`.
 */
const jumper = {
  // Under full jsArgs, rewrite `arguments.length` -> argsLen and other
  // `arguments` references -> materializedArguments.
  Identifier: function (path: NodePath<t.Identifier>, s: State): void {
    if (s.opts.jsArgs === 'full' && path.node.name === 'arguments' &&
        (t.isMemberExpression(path.parent) &&
         path.parent.property.type === 'Identifier' &&
         path.parent.property.name === 'length')) {
      path.parentPath.replaceWith(argsLen);
    } else if (s.opts.jsArgs === 'full' && path.node.name === 'arguments') {
      path.node.name = 'materializedArguments';
    }
  },
  BlockStatement: {
    // Coalesce runs of consecutive `if (isNormalMode) s;` statements into a
    // single guarded block to cut down on repeated tests.
    exit(path: NodePath<Labeled<t.BlockStatement>>) {
      const stmts = path.node.body;
      if (stmts.length === 1) {
        return;
      }
      const blocks = generic.groupBy((x,y) =>
        isNormalGuarded(x) && isNormalGuarded(y), stmts);
      const result: t.Statement[] = [];
      for (const block of blocks) {
        if (block.length === 1) {
          result.push(block[0]);
        }
        else {
          block.forEach((stmt) => {
            assert((<t.IfStatement>stmt).test === isNormalMode);
          });
          result.push(
            bh.sIf(isNormalMode,
              t.blockStatement(block.map((stmt) =>(<t.IfStatement>stmt)
                .consequent))));
        }
      }
      path.node.body = result;
    }
  },
  ExpressionStatement: {
    // Application sites get the configured capture logic; everything else is
    // guarded so it only runs in normal mode.
    exit(path: NodePath<Labeled<t.ExpressionStatement>>, s: State) {
      if (isFlat(path)) { return; }
      if (path.node.appType !== undefined &&
          path.node.appType >= AppType.Tail) {
        // Skip if the right hand-side is a flat call
        if (path.node.expression.type === 'AssignmentExpression' &&
            (<any>path.node.expression.right).mark === 'Flat') {
          // Do Nothing
        }
        else {
          const captureFun = captureLogics[s.opts.captureMethod];
          captureFun(<any>path.get('expression'), s.opts);
          return;
        }
      }
      path.replaceWith(t.ifStatement(isNormalMode, path.node));
      path.skip();
    }
  },
  "FunctionExpression|FunctionDeclaration": {
    enter(path: NodePath<Labeled<FunctionT>>, s: State) {
      // Record `arguments` usage before children are transformed.
      path.node.__usesArgs__ = usesArguments(path);
      if ((<any>path.node).mark === 'Flat') {
        return;
      }
    },
    exit(path: NodePath<Labeled<FunctionT>>, state: State): void {
      if((<any>path.node).mark === 'Flat') {
        return;
      }
      // Apply the per-strategy body instrumentation, then declare the
      // restore bookkeeping variables ($frame, target).
      const instrumentFunction = instrumentLogics[state.opts.captureMethod];
      instrumentFunction(path, state);
      const declTarget = bh.varDecl(target, t.nullLiteral());
      const declFrame = bh.varDecl(frame, t.nullLiteral());
      path.node.body.body.unshift(declTarget, declFrame);
      // Increment the remainingStack at the last line of the function.
      // This does not break tail calls.
      path.node.body.body.push(increaseStackSize);
      if (state.opts.newMethod === 'direct') {
        // Emulate `new` semantics: capture new.target and, for constructor
        // calls that fall off the end, return `this`.
        path.node.localVars.push(newTarget);
        const declNewTarget = bh.varDecl(newTarget,
          t.memberExpression(t.identifier('new'), t.identifier('target')));
        path.node.body.body.unshift(declNewTarget);
        const ifConstructor = bh.sIf(newTarget,
          t.returnStatement(t.thisExpression()));
        (<any>ifConstructor).isTransformed = true;
        path.node.body.body.push(ifConstructor);
      }
    }
  },
  // Loop runs when in normal mode and the test holds, or when restoring
  // into one of the loop's labels.
  WhileStatement: function (path: NodePath<Labeled<t.WhileStatement>>): void {
    // No need for isFlat check here. Loops make functions not flat.
    path.node.test = bh.or(bh.and(isNormalMode, path.node.test),
      bh.and(isRestoringMode, labelsIncludeTarget(getLabels(path.node))));
  },
  LabeledStatement: {
    exit(path: NodePath<Labeled<t.LabeledStatement>>): void {
      if (isFlat(path)) {
        return;
      }
      path.replaceWith(bh.sIf(bh.or(isNormalMode,
        bh.and(isRestoringMode, labelsIncludeTarget(getLabels(path.node)))),
        path.node));
      path.skip();
    }
  },
  IfStatement: {
    // Each branch becomes reachable either normally (test decides) or when
    // restoring into a label that lives inside that branch.
    exit(path: NodePath<Labeled<t.IfStatement>>): void {
      if ((<any>path.node).isTransformed || isFlat(path)) {
        return;
      }
      const { test, consequent, alternate } = path.node;
      const alternateCond = bh.or(
        isNormalMode,
        bh.and(isRestoringMode,
          labelsIncludeTarget(getLabels(alternate))));
      const newAlt = alternate === null ? alternate :
        t.ifStatement(alternateCond, alternate);
      const consequentCond = bh.or(
        bh.and(isNormalMode, test),
        bh.and(isRestoringMode, labelsIncludeTarget(getLabels(consequent))));
      const newIf = t.ifStatement(consequentCond, consequent, newAlt);
      path.replaceWith(newIf);
      path.skip();
    },
  },
  ReturnStatement: {
    exit(path: NodePath<Labeled<t.ReturnStatement>>, s: State): void {
      switch (path.node.appType) {
        case AppType.None: {
          if(isFlat(path)) {
            return;
          }
          if (s.opts.newMethod === 'direct') {
            // Constructor-call emulation: a non-object return from a `new`
            // invocation yields `this` instead.
            const retval = fresh('retval');
            const declRetval = letExpression(retval, path.node.argument);
            const isObject = t.binaryExpression('instanceof',
              retval, t.identifier('Object'));
            const conditional = t.conditionalExpression(bh.and(newTarget,
              t.unaryExpression('!', isObject)), t.thisExpression(),
              retval);
            path.replaceWith(t.blockStatement([
              declRetval,
              increaseStackSize,
              t.returnStatement(conditional),
            ]));
            path.skip();
          } else {
            path.insertBefore(increaseStackSize);
          }
          break;
        }
        case AppType.Mixed:
          return;
        case AppType.Tail:
          // Increment the remainingStack before returning from a non-flat function.
          if(!isFlat(path)) {
            path.insertBefore(increaseStackSize);
          }
          // Labels may occur if this return statement occurs in a try block.
          const labels = getLabels(path.node);
          const ifReturn = t.ifStatement(
            isNormalMode,
            path.node,
            bh.sIf(bh.and(isRestoringMode, labelsIncludeTarget(labels)),
              t.returnStatement(stackFrameCall)));
          path.replaceWith(ifReturn);
          path.skip();
          return;
      }
    }
  },
  CatchClause: {
    // User catch blocks must not swallow the control-flow exceptions; rethrow
    // them first, then clear the runtime trace for genuine user errors.
    exit(path: NodePath<t.CatchClause>, s: State): void {
      if (s.opts.captureMethod === 'retval' || isFlat(path)) {
        return;
      }
      const { param, body } = path.node;
      body.body.unshift(t.ifStatement(
        bh.or(
          t.binaryExpression('instanceof', param, captureExn),
          t.binaryExpression('instanceof', param, restoreExn),
          t.binaryExpression('instanceof', param, endTurnExn)),
        t.throwStatement(param)),
        t.expressionStatement(runtimeInvoke('clearTrace')));
      path.skip();
    }
  },
  TryStatement: {
    exit(path: NodePath<t.TryStatement>) {
      // To understand what's happening here, see jumperizeTry.ts
      if (path.node.handler) {
        path.node.block.body.unshift(
          bh.sIf(bh.and(isRestoringMode,
            labelsIncludeTarget(getLabels(path.node.handler.body))),
            t.throwStatement(<t.Identifier>(<any>path.node.handler).eVar)));
      }
      if (path.node.finalizer) {
        // Don't run user finalizers while the runtime is capturing the stack.
        path.node.finalizer = t.blockStatement([
          bh.sIf(t.unaryExpression('!',
            t.memberExpression(runtime, t.identifier('capturing'))),
            path.node.finalizer)]);
      }
    }
  },
  ThrowStatement: {
    // Transform `throw e;` to `runtime.pushTrace(l); throw e;`, where `l`
    // is a string that represents the original source location of this
    // statement. We assume that the `throw e` occurs in a block.
    exit(path: NodePath<t.ThrowStatement>, s: State) {
      const fName = bh.enclosingFunctionName(path);
      const l = t.stringLiteral(h.locationString(fName, path, s.opts));
      path.insertBefore(t.expressionStatement(runtimeInvoke('pushTrace', l)));
    }
  }
};
/** Babel plugin entry point exposing the jumper visitor. */
export function plugin(): any {
  return { visitor: jumper };
}
import * as msRest from "@azure/ms-rest-js";
import * as Models from "../models";
import * as Mappers from "../models/viewsMappers";
import * as Parameters from "../models/parameters";
import { CustomerInsightsManagementClientContext } from "../customerInsightsManagementClientContext";
/** Class representing the Views operation group of the CustomerInsights management client. */
export class Views {
private readonly client: CustomerInsightsManagementClientContext;
/**
 * Create a Views.
 * @param {CustomerInsightsManagementClientContext} client Reference to the service client.
 */
constructor(client: CustomerInsightsManagementClientContext) {
this.client = client;
}
/**
 * Gets all available views for given user in the specified hub.
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param userId The user ID. Use * to retrieve hub level views.
 * @param [options] The optional parameters
 * @returns Promise<Models.ViewsListByHubResponse>
 */
listByHub(resourceGroupName: string, hubName: string, userId: string, options?: msRest.RequestOptionsBase): Promise<Models.ViewsListByHubResponse>;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param userId The user ID. Use * to retrieve hub level views.
 * @param callback The callback
 */
listByHub(resourceGroupName: string, hubName: string, userId: string, callback: msRest.ServiceCallback<Models.ViewListResult>): void;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param userId The user ID. Use * to retrieve hub level views.
 * @param options The optional parameters
 * @param callback The callback
 */
listByHub(resourceGroupName: string, hubName: string, userId: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ViewListResult>): void;
// Implementation signature: delegates to the service client with the
// listByHub operation spec; callback/options disambiguation is handled
// inside sendOperationRequest.
listByHub(resourceGroupName: string, hubName: string, userId: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ViewListResult>, callback?: msRest.ServiceCallback<Models.ViewListResult>): Promise<Models.ViewsListByHubResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
hubName,
userId,
options
},
listByHubOperationSpec,
callback) as Promise<Models.ViewsListByHubResponse>;
}
/**
 * Creates a view or updates an existing view in the hub.
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param parameters Parameters supplied to the CreateOrUpdate View operation.
 * @param [options] The optional parameters
 * @returns Promise<Models.ViewsCreateOrUpdateResponse>
 */
createOrUpdate(resourceGroupName: string, hubName: string, viewName: string, parameters: Models.ViewResourceFormat, options?: msRest.RequestOptionsBase): Promise<Models.ViewsCreateOrUpdateResponse>;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param parameters Parameters supplied to the CreateOrUpdate View operation.
 * @param callback The callback
 */
createOrUpdate(resourceGroupName: string, hubName: string, viewName: string, parameters: Models.ViewResourceFormat, callback: msRest.ServiceCallback<Models.ViewResourceFormat>): void;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param parameters Parameters supplied to the CreateOrUpdate View operation.
 * @param options The optional parameters
 * @param callback The callback
 */
createOrUpdate(resourceGroupName: string, hubName: string, viewName: string, parameters: Models.ViewResourceFormat, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ViewResourceFormat>): void;
createOrUpdate(resourceGroupName: string, hubName: string, viewName: string, parameters: Models.ViewResourceFormat, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ViewResourceFormat>, callback?: msRest.ServiceCallback<Models.ViewResourceFormat>): Promise<Models.ViewsCreateOrUpdateResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
hubName,
viewName,
parameters,
options
},
createOrUpdateOperationSpec,
callback) as Promise<Models.ViewsCreateOrUpdateResponse>;
}
/**
 * Gets a view in the hub.
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param userId The user ID. Use * to retrieve hub level view.
 * @param [options] The optional parameters
 * @returns Promise<Models.ViewsGetResponse>
 */
get(resourceGroupName: string, hubName: string, viewName: string, userId: string, options?: msRest.RequestOptionsBase): Promise<Models.ViewsGetResponse>;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param userId The user ID. Use * to retrieve hub level view.
 * @param callback The callback
 */
get(resourceGroupName: string, hubName: string, viewName: string, userId: string, callback: msRest.ServiceCallback<Models.ViewResourceFormat>): void;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param userId The user ID. Use * to retrieve hub level view.
 * @param options The optional parameters
 * @param callback The callback
 */
get(resourceGroupName: string, hubName: string, viewName: string, userId: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ViewResourceFormat>): void;
get(resourceGroupName: string, hubName: string, viewName: string, userId: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ViewResourceFormat>, callback?: msRest.ServiceCallback<Models.ViewResourceFormat>): Promise<Models.ViewsGetResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
hubName,
viewName,
userId,
options
},
getOperationSpec,
callback) as Promise<Models.ViewsGetResponse>;
}
/**
 * Deletes a view in the specified hub.
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param userId The user ID. Use * to retrieve hub level view.
 * @param [options] The optional parameters
 * @returns Promise<msRest.RestResponse>
 */
deleteMethod(resourceGroupName: string, hubName: string, viewName: string, userId: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse>;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param userId The user ID. Use * to retrieve hub level view.
 * @param callback The callback
 */
deleteMethod(resourceGroupName: string, hubName: string, viewName: string, userId: string, callback: msRest.ServiceCallback<void>): void;
/**
 * @param resourceGroupName The name of the resource group.
 * @param hubName The name of the hub.
 * @param viewName The name of the view.
 * @param userId The user ID. Use * to retrieve hub level view.
 * @param options The optional parameters
 * @param callback The callback
 */
deleteMethod(resourceGroupName: string, hubName: string, viewName: string, userId: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<void>): void;
deleteMethod(resourceGroupName: string, hubName: string, viewName: string, userId: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
return this.client.sendOperationRequest(
{
resourceGroupName,
hubName,
viewName,
userId,
options
},
deleteMethodOperationSpec,
callback);
}
/**
 * Gets all available views for given user in the specified hub.
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param [options] The optional parameters
 * @returns Promise<Models.ViewsListByHubNextResponse>
 */
listByHubNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.ViewsListByHubNextResponse>;
/**
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param callback The callback
 */
listByHubNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.ViewListResult>): void;
/**
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param options The optional parameters
 * @param callback The callback
 */
listByHubNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.ViewListResult>): void;
listByHubNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.ViewListResult>, callback?: msRest.ServiceCallback<Models.ViewListResult>): Promise<Models.ViewsListByHubNextResponse> {
return this.client.sendOperationRequest(
{
nextPageLink,
options
},
listByHubNextOperationSpec,
callback) as Promise<Models.ViewsListByHubNextResponse>;
}
}
// Operation Specifications
// Shared serializer built over the Views mappers; reused by every spec below.
const serializer = new msRest.Serializer(Mappers);
// GET list of views in a hub; userId is passed as a query parameter.
const listByHubOperationSpec: msRest.OperationSpec = {
httpMethod: "GET",
path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views",
urlParameters: [
Parameters.resourceGroupName,
Parameters.hubName1,
Parameters.subscriptionId
],
queryParameters: [
Parameters.apiVersion,
Parameters.userId
],
headerParameters: [
Parameters.acceptLanguage
],
responses: {
200: {
bodyMapper: Mappers.ViewListResult
},
// Any non-200 status is deserialized as a CloudError.
default: {
bodyMapper: Mappers.CloudError
}
},
serializer
};
// PUT create-or-update of a single view; the view resource travels in the
// request body (required) and is echoed back on success.
const createOrUpdateOperationSpec: msRest.OperationSpec = {
httpMethod: "PUT",
path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views/{viewName}",
urlParameters: [
Parameters.resourceGroupName,
Parameters.hubName1,
Parameters.viewName0,
Parameters.subscriptionId
],
queryParameters: [
Parameters.apiVersion
],
headerParameters: [
Parameters.acceptLanguage
],
requestBody: {
parameterPath: "parameters",
mapper: {
...Mappers.ViewResourceFormat,
required: true
}
},
responses: {
200: {
bodyMapper: Mappers.ViewResourceFormat
},
default: {
bodyMapper: Mappers.CloudError
}
},
serializer
};
// GET a single view by name, scoped to a user via the userId query parameter.
const getOperationSpec: msRest.OperationSpec = {
httpMethod: "GET",
path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views/{viewName}",
urlParameters: [
Parameters.resourceGroupName,
Parameters.hubName1,
Parameters.viewName1,
Parameters.subscriptionId
],
queryParameters: [
Parameters.apiVersion,
Parameters.userId
],
headerParameters: [
Parameters.acceptLanguage
],
responses: {
200: {
bodyMapper: Mappers.ViewResourceFormat
},
default: {
bodyMapper: Mappers.CloudError
}
},
serializer
};
// DELETE a single view; a successful response carries no body.
const deleteMethodOperationSpec: msRest.OperationSpec = {
httpMethod: "DELETE",
path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views/{viewName}",
urlParameters: [
Parameters.resourceGroupName,
Parameters.hubName1,
Parameters.viewName1,
Parameters.subscriptionId
],
queryParameters: [
Parameters.apiVersion,
Parameters.userId
],
headerParameters: [
Parameters.acceptLanguage
],
responses: {
200: {},
default: {
bodyMapper: Mappers.CloudError
}
},
serializer
};
// Pagination follow-up: the nextLink returned by listByHub is an absolute
// URL, so it is used verbatim as the path (skipUrlEncoding via nextPageLink).
const listByHubNextOperationSpec: msRest.OperationSpec = {
httpMethod: "GET",
baseUrl: "https://management.azure.com",
path: "{nextLink}",
urlParameters: [
Parameters.nextPageLink
],
headerParameters: [
Parameters.acceptLanguage
],
responses: {
200: {
bodyMapper: Mappers.ViewListResult
},
default: {
bodyMapper: Mappers.CloudError
}
},
serializer
};
import * as nbformat from '@jupyterlab/nbformat';
import { NotebookPanel } from '@jupyterlab/notebook';
import { ElementHandle, Page } from '@playwright/test';
import * as path from 'path';
import { galata } from '../galata';
import { INotebookRunCallback } from '../inpage/tokens';
import * as Utils from '../utils';
import { ActivityHelper } from './activity';
import { ContentsHelper } from '../contents';
import { FileBrowserHelper } from './filebrowser';
import { MenuHelper } from './menu';
/**
* Notebook helpers
*/
export class NotebookHelper {
constructor(
readonly page: Page,
readonly activity: ActivityHelper,
readonly contents: ContentsHelper,
readonly filebrowser: FileBrowserHelper,
readonly menu: MenuHelper
) {}
/**
* Whether a given notebook is opened or not
*
* @param name Notebook name
* @returns Notebook opened status
*/
async isOpen(name: string): Promise<boolean> {
const tab = await this.activity.getTab(name);
return tab !== null;
}
/**
* Whether a given notebook is active or not
*
* @param name Notebook name
* @returns Notebook active status
*/
async isActive(name: string): Promise<boolean> {
return this.activity.isTabActive(name);
}
/**
* Whether a notebook is currently active or not
*
* @returns Notebook active status
*/
async isAnyActive(): Promise<boolean> {
return (await this.getNotebookInPanel()) !== null;
}
/**
* Open a notebook from its name
*
* The notebook needs to exist in the current folder.
*
* @param name Notebook name
* @returns Action success status
*/
async open(name: string): Promise<boolean> {
const isListed = await this.filebrowser.isFileListedInBrowser(name);
if (!isListed) {
return false;
}
await this.filebrowser.open(name);
return await this.isOpen(name);
}
/**
* Open a notebook from its path
*
* The notebook do not need to exist in the current folder
*
* @param filePath Notebook path
* @returns Action success status
*/
async openByPath(filePath: string): Promise<boolean> {
await this.filebrowser.open(filePath);
const name = path.basename(filePath);
return await this.isOpen(name);
}
/**
* Get the handle to a notebook panel
*
* @param name Notebook name
* @returns Handle to the Notebook panel
*/
async getNotebookInPanel(
name?: string
): Promise<ElementHandle<Element> | null> {
const nbPanel = await this.activity.getPanel(name);
if (nbPanel) {
return await nbPanel.$('.jp-NotebookPanel-notebook');
}
return null;
}
/**
* Get the handle to a notebook toolbar
*
* @param name Notebook name
* @returns Handle to the Notebook toolbar
*/
async getToolbar(name?: string): Promise<ElementHandle<Element> | null> {
const nbPanel = await this.activity.getPanel(name);
if (nbPanel) {
return await nbPanel.$('.jp-NotebookPanel-toolbar');
}
return null;
}
/**
* Get the handle to a notebook toolbar item from its index
*
* @param itemIndex Toolbar item index
* @param notebookName Notebook name
* @returns Handle to the notebook toolbar item
*/
async getToolbarItemByIndex(
itemIndex: number,
notebookName?: string
): Promise<ElementHandle<Element> | null> {
if (itemIndex === -1) {
return null;
}
const toolbar = await this.getToolbar(notebookName);
if (toolbar) {
const toolbarItems = await toolbar.$$('.jp-Toolbar-item');
if (itemIndex < toolbarItems.length) {
return toolbarItems[itemIndex];
}
}
return null;
}
/**
* Get the handle to a notebook toolbar item from its id
*
* @param itemId Toolbar item id
* @param notebookName Notebook name
* @returns Handle to the notebook toolbar item
*/
async getToolbarItem(
itemId: galata.NotebookToolbarItemId,
notebookName?: string
): Promise<ElementHandle<Element> | null> {
const toolbar = await this.getToolbar(notebookName);
if (toolbar) {
const itemIndex = await this.page.evaluate(async (itemId: string) => {
return window.galataip.getNotebookToolbarItemIndex(itemId);
}, itemId);
return this.getToolbarItemByIndex(itemIndex);
}
return null;
}
/**
* Click on a notebook toolbar item
*
* @param itemId Toolbar item id
* @param notebookName Notebook name
* @returns Action success status
*/
async clickToolbarItem(
itemId: galata.NotebookToolbarItemId,
notebookName?: string
): Promise<boolean> {
const toolbarItem = await this.getToolbarItem(itemId, notebookName);
if (toolbarItem) {
await toolbarItem.click();
return true;
}
return false;
}
/**
* Activate a notebook
*
* @param name Notebook name
* @returns Action success status
*/
async activate(name: string): Promise<boolean> {
if (await this.activity.activateTab(name)) {
await this.page.evaluate(async () => {
const galataip = window.galataip;
const nbPanel = galataip.app.shell.currentWidget as NotebookPanel;
await nbPanel.sessionContext.ready;
// Assuming that if the session is ready, the kernel is ready also for now and commenting out this line
// await nbPanel.session.kernel.ready;
galataip.app.shell.activateById(nbPanel.id);
});
return true;
}
return false;
}
/**
* Save the currently active notebook
*
* @returns Action success status
*/
async save(): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
await this.page.evaluate(async () => {
await window.galataip.saveActiveNotebook();
});
return true;
}
/**
* Revert changes to the currently active notebook
*
* @returns Action success status
*/
async revertChanges(): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
await this.page.evaluate(async () => {
const app = window.galataip.app;
const nbPanel = app.shell.currentWidget as NotebookPanel;
await nbPanel.context.revert();
});
return true;
}
/**
* Run all cells of the currently active notebook
*
* @returns Action success status
*/
async run(): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
await this.menu.clickMenuItem('Run>Run All Cells');
await this.waitForRun();
return true;
}
/**
* Run the currently active notebook cell by cell.
*
* @param callback Cell ran callback
* @returns Action success status
*/
async runCellByCell(callback?: INotebookRunCallback): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
let callbackName = '';
if (callback) {
callbackName = `_runCallbacksExposed${++this._runCallbacksExposed}`;
await this.page.exposeFunction(
`${callbackName}_onBeforeScroll`,
async () => {
if (callback && callback.onBeforeScroll) {
await callback.onBeforeScroll();
}
}
);
await this.page.exposeFunction(
`${callbackName}_onAfterScroll`,
async () => {
if (callback && callback.onAfterScroll) {
await callback.onAfterScroll();
}
}
);
await this.page.exposeFunction(
`${callbackName}_onAfterCellRun`,
async (cellIndex: number) => {
if (callback && callback.onAfterCellRun) {
await callback.onAfterCellRun(cellIndex);
}
}
);
}
await this.page.evaluate(async (callbackName: string) => {
const callbacks =
callbackName === ''
? undefined
: ({
onBeforeScroll: async () => {
await (window as any)[`${callbackName}_onBeforeScroll`]();
},
onAfterScroll: async () => {
await (window as any)[`${callbackName}_onAfterScroll`]();
},
onAfterCellRun: async (cellIndex: number) => {
await (window as any)[`${callbackName}_onAfterCellRun`](
cellIndex
);
}
} as INotebookRunCallback);
await window.galataip.runActiveNotebookCellByCell(callbacks);
}, callbackName);
return true;
}
/**
* Wait for notebook cells execution to finish
*/
async waitForRun(): Promise<void> {
await this.page.evaluate(async () => {
await window.galataip.waitForNotebookRun();
});
}
/**
* Close the notebook with or without reverting unsaved changes
*
* @param revertChanges Whether to revert changes or not
* @returns Action success status
*/
async close(revertChanges = true): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
const page = this.page;
const tab = await this.activity.getTab();
if (!tab) {
return false;
}
if (revertChanges) {
if (!(await this.revertChanges())) {
return false;
}
}
const closeIcon = await tab.$('.lm-TabBar-tabCloseIcon');
if (!closeIcon) {
return false;
}
await closeIcon.click();
// close save prompt
const dialogSelector = '.jp-Dialog .jp-Dialog-content';
const dialog = await page.$(dialogSelector);
if (dialog) {
const dlgBtnSelector = revertChanges
? 'button.jp-mod-accept.jp-mod-warn' // discard
: 'button.jp-mod-accept:not(.jp-mod-warn)'; // save
const dlgBtn = await dialog.$(dlgBtnSelector);
if (dlgBtn) {
await dlgBtn.click();
}
}
await page.waitForSelector(dialogSelector, { state: 'hidden' });
return true;
}
/**
* Get the number of cells in the currently active notebook
*
* @returns Number of cells
*/
getCellCount = async (): Promise<number> => {
const notebook = await this.getNotebookInPanel();
if (!notebook) {
return -1;
}
const cells = await notebook.$$('div.jp-Cell');
return cells.length;
};
/**
* Get a cell handle
*
* @param cellIndex Cell index
* @returns Handle to the cell
*/
async getCell(cellIndex: number): Promise<ElementHandle<Element> | null> {
const notebook = await this.getNotebookInPanel();
if (!notebook) {
return null;
}
const cells = await notebook.$$('div.jp-Cell');
if (cellIndex < 0 || cellIndex >= cells.length) {
return null;
}
return cells[cellIndex];
}
/**
* Get the handle to the input of a cell
*
* @param cellIndex Cell index
* @returns Handle to the cell input
*/
async getCellInput(
cellIndex: number
): Promise<ElementHandle<Element> | null> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return null;
}
const cellEditor = await cell.$('.jp-InputArea-editor');
if (!cellEditor) {
return null;
}
const isRenderedMarkdown = await cellEditor.evaluate(editor =>
editor.classList.contains('lm-mod-hidden')
);
if (isRenderedMarkdown) {
return await cell.$('.jp-MarkdownOutput');
}
return cellEditor;
}
/**
* Get the handle to the input expander of a cell
*
* @param cellIndex Cell index
* @returns Handle to the cell input expander
*/
async getCellInputExpander(
cellIndex: number
): Promise<ElementHandle<Element> | null> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return null;
}
return await cell.$('.jp-InputCollapser');
}
/**
* Whether a cell input is expanded or not
*
* @param cellIndex Cell index
* @returns Cell input expanded status
*/
async isCellInputExpanded(cellIndex: number): Promise<boolean | null> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return null;
}
return (await cell.$('.jp-InputPlaceholder')) === null;
}
/**
* Set the expanded status of a given input cell
*
* @param cellIndex Cell index
* @param expand Input expanded status
* @returns Action success status
*/
async expandCellInput(cellIndex: number, expand: boolean): Promise<boolean> {
const expanded = await this.isCellInputExpanded(cellIndex);
if ((expanded && expand) || (!expanded && !expand)) {
return false;
}
const inputExpander = await this.getCellInputExpander(cellIndex);
if (!inputExpander) {
return false;
}
await inputExpander.click();
return true;
}
/**
* Get the handle to a cell output expander
*
* @param cellIndex Cell index
* @returns Handle to the cell output expander
*/
async getCellOutputExpander(
cellIndex: number
): Promise<ElementHandle<Element> | null> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return null;
}
const cellType = await this.getCellType(cellIndex);
return cellType === 'code' ? await cell.$('.jp-OutputCollapser') : null;
}
/**
* Whether a cell output is expanded or not
*
* @param cellIndex Cell index
* @returns Cell output expanded status
*/
async isCellOutputExpanded(cellIndex: number): Promise<boolean | null> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return null;
}
return (await cell.$('.jp-OutputPlaceholder')) === null;
}
/**
* Set the expanded status of a given output cell
*
* @param cellIndex Cell index
* @param expand Output expanded status
* @returns Action success status
*/
async expandCellOutput(cellIndex: number, expand: boolean): Promise<boolean> {
const expanded = await this.isCellOutputExpanded(cellIndex);
if ((expanded && expand) || (!expanded && !expand)) {
return false;
}
const outputExpander = await this.getCellOutputExpander(cellIndex);
if (!outputExpander) {
return false;
}
await outputExpander.click();
return true;
}
/**
* Get the handle on a given output cell
*
* @param cellIndex Cell index
* @returns Output cell handle
*/
async getCellOutput(
cellIndex: number
): Promise<ElementHandle<Element> | null> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return null;
}
const codeCellOutput = await cell.$('.jp-Cell-outputArea');
if (codeCellOutput) {
return codeCellOutput;
}
const mdCellOutput = await cell.$('.jp-MarkdownOutput');
if (mdCellOutput) {
return mdCellOutput;
}
return null;
}
/**
* Get all cell outputs as text
*
* @param cellIndex Cell index
* @returns List of text outputs
*/
async getCellTextOutput(cellIndex: number): Promise<string[] | null> {
const cellOutput = await this.getCellOutput(cellIndex);
if (!cellOutput) {
return null;
}
const textOutputs = await cellOutput.$$('.jp-OutputArea-output');
if (textOutputs.length > 0) {
const outputs: string[] = [];
for (const textOutput of textOutputs) {
outputs.push(
(await (
await textOutput.getProperty('textContent')
).jsonValue()) as string
);
}
return outputs;
}
return null;
}
/**
* Whether the cell is in editing mode or not
*
* @param cellIndex Cell index
* @returns Editing mode
*/
async isCellInEditingMode(cellIndex: number): Promise<boolean> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return false;
}
const cellEditor = await cell.$('.jp-InputArea-editor');
if (cellEditor) {
return await cellEditor.evaluate(editor =>
editor.classList.contains('jp-mod-focused')
);
}
return false;
}
/**
* Enter the editing mode on a given cell
*
* @param cellIndex Cell index
* @returns Action success status
*/
async enterCellEditingMode(cellIndex: number): Promise<boolean> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return false;
}
const cellEditor = await cell.$('.jp-Cell-inputArea');
if (cellEditor) {
let isMarkdown = false;
const cellType = await this.getCellType(cellIndex);
if (cellType === 'markdown') {
const renderedMarkdown = await cell.$('.jp-MarkdownOutput');
if (renderedMarkdown) {
isMarkdown = true;
}
}
if (isMarkdown) {
await cellEditor.dblclick();
}
await cellEditor.click();
return true;
}
return false;
}
/**
* Leave the editing mode
*
* @param cellIndex Cell index
* @returns Action success status
*/
async leaveCellEditingMode(cellIndex: number): Promise<boolean> {
if (await this.isCellInEditingMode(cellIndex)) {
await this.page.keyboard.press('Escape');
return true;
}
return false;
}
/**
* Clicks a cell gutter line for code cells
*
* @param cellIndex Cell index
* @param lineNumber Cell line number, starts at 1
*/
async clickCellGutter(
cellIndex: number,
lineNumber: number
): Promise<boolean> {
if (lineNumber < 1) {
return false;
}
if (!(await this.isCellGutterPresent(cellIndex))) {
return false;
}
const cell = await this.getCell(cellIndex);
const gutters = await cell!.$$(
'.CodeMirror-gutter-wrapper > .CodeMirror-linenumber'
);
if (gutters.length < lineNumber) {
return false;
}
await gutters[lineNumber - 1].click();
return true;
}
/**
* Check if cell gutter is present
*
* @param cellIndex
*/
async isCellGutterPresent(cellIndex: number): Promise<boolean> {
const cell = await this.getCell(cellIndex);
if (!cell) {
return false;
}
return (await cell.$('.CodeMirror-gutter-wrapper')) !== null;
}
/**
* Wait until cell gutter is visible
*
* @param cellIndex
*/
async waitForCellGutter(cellIndex: number) {
const cell = await this.getCell(cellIndex);
if (cell) {
await this.page.waitForSelector('.CodeMirror-gutter-wrapper', {
state: 'attached'
});
}
}
/**
* Select cells
*
* @param startIndex Start cell index
* @param endIndex End cell index
* @returns Action success status
*/
async selectCells(startIndex: number, endIndex?: number): Promise<boolean> {
const startCell = await this.getCell(startIndex);
if (!startCell) {
return false;
}
const clickPosition: any = { x: 15, y: 5 };
await startCell.click({ position: clickPosition });
if (endIndex !== undefined) {
const endCell = await this.getCell(endIndex);
if (!endCell) {
return false;
}
await endCell.click({ position: clickPosition, modifiers: ['Shift'] });
}
return true;
}
/**
* Whether a given cell is selected or not
*
* @param cellIndex Cell index
* @returns Selection status
*/
async isCellSelected(cellIndex: number): Promise<boolean> {
return await this.page.evaluate((cellIndex: number) => {
return window.galataip.isNotebookCellSelected(cellIndex);
}, cellIndex);
}
/**
* Delete selected cells
*
* @returns Action success status
*/
async deleteCells(): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
await this.page.evaluate(() => {
return window.galataip.deleteNotebookCells();
});
return true;
}
/**
* Add a cell to the currently active notebook
*
* @param cellType Cell type
* @param source Source
* @returns Action success status
*/
async addCell(cellType: nbformat.CellType, source: string): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
const numCells = await this.getCellCount();
await this.selectCells(numCells - 1);
await this.clickToolbarItem('insert');
await Utils.waitForCondition(
async (): Promise<boolean> => {
return (await this.getCellCount()) === numCells + 1;
}
);
return await this.setCell(numCells, cellType, source);
}
/**
* Set the input source of a cell
*
* @param cellIndex Cell index
* @param cellType Cell type
* @param source Source
* @returns Action success status
*/
async setCell(
cellIndex: number,
cellType: nbformat.CellType,
source: string
): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
await this.setCellType(cellIndex, cellType);
if (
!(await this.isCellSelected(cellIndex)) &&
!(await this.selectCells(cellIndex))
) {
return false;
}
await this.enterCellEditingMode(cellIndex);
const keyboard = this.page.keyboard;
await keyboard.press('Control+A');
// give CodeMirror time to style properly
await keyboard.type(source, { delay: cellType === 'code' ? 100 : 0 });
await this.leaveCellEditingMode(cellIndex);
// give CodeMirror time to style properly
if (cellType === 'code') {
await this.page.waitForTimeout(500);
}
return true;
}
/**
* Set the type of a cell
*
* @param cellIndex Cell index
* @param cellType Cell type
* @returns Action success status
*/
async setCellType(
cellIndex: number,
cellType: nbformat.CellType
): Promise<boolean> {
const nbPanel = await this.activity.getPanel();
if (!nbPanel) {
return false;
}
if ((await this.getCellType(cellIndex)) === cellType) {
return false;
}
if (!(await this.selectCells(cellIndex))) {
return false;
}
await this.clickToolbarItem('cellType');
const selectInput = await nbPanel.$(
'div.jp-Notebook-toolbarCellTypeDropdown select'
);
if (!selectInput) {
return false;
}
await selectInput.selectOption(cellType);
return true;
}
/**
* Get the cell type of a cell
*
* @param cellIndex Cell index
* @returns Cell type
*/
async getCellType(cellIndex: number): Promise<nbformat.CellType | null> {
const notebook = await this.getNotebookInPanel();
if (!notebook) {
return null;
}
const cells = await notebook.$$('div.jp-Cell');
if (cellIndex < 0 || cellIndex >= cells.length) {
return null;
}
const cell = cells[cellIndex];
const classList = await Utils.getElementClassList(cell);
if (classList.indexOf('jp-CodeCell') !== -1) {
return 'code';
} else if (classList.indexOf('jp-MarkdownCell') !== -1) {
return 'markdown';
} else if (classList.indexOf('jp-RawCell') !== -1) {
return 'raw';
}
return null;
}
/**
* Run a given cell
*
* @param cellIndex Cell index
* @param inplace Whether to stay on the cell or select the next one
* @returns Action success status
*/
async runCell(cellIndex: number, inplace?: boolean): Promise<boolean> {
if (!(await this.isAnyActive())) {
return false;
}
if (
!(await this.isCellSelected(cellIndex)) &&
!(await this.selectCells(cellIndex))
) {
return false;
}
await this.page.keyboard.press(
inplace === true ? 'Control+Enter' : 'Shift+Enter'
);
await this.waitForRun();
return true;
}
/**
* Create a new notebook
*
* @param name Name of the notebook
* @returns Name of the created notebook or null if it failed
*/
async createNew(name?: string): Promise<string | null> {
await this.menu.clickMenuItem('File>New>Notebook');
const page = this.page;
await page.waitForSelector('.jp-Dialog');
await page.click('.jp-Dialog .jp-mod-accept');
const activeTab = await this.activity.getTab();
if (!activeTab) {
return null;
}
const label = await activeTab.$('div.lm-TabBar-tabLabel');
if (!label) {
return null;
}
const assignedName = (await (
await label.getProperty('textContent')
).jsonValue()) as string;
if (!name) {
return assignedName;
}
const currentDir = await this.filebrowser.getCurrentDirectory();
await this.contents.renameFile(
`${currentDir}/${assignedName}`,
`${currentDir}/${name}`
);
const renamedTab = await this.activity.getTab(name);
return renamedTab ? name : null;
}
// NOTE(review): readers/writers of this counter are outside this chunk —
// presumably it tracks how many run callbacks have been exposed to the page
// (see waitForRun usage above); confirm before relying on it.
private _runCallbacksExposed = 0;
}
import { Pulse } from '../pulse';
import { State, Group, PrimaryKey, GroupName, GroupAddOptions, Selector, Data, SelectorName } from '../internal';
import { defineConfig, shallowmerge } from '../utils';
import { deepmerge } from '../helpers/deepmerge';
import { normalizeArray } from '../utils';
// Shorthand for an expandable data item: any string-keyed object
export type DefaultDataItem = Record<string, any>;
// Chainable removal helpers returned by Collection.remove()
interface RemoveOptions {
  // remove the keys from the named group(s) only
  fromGroups: (groups: string | number | Array<string>) => any;
  // delete the underlying data and remove the keys from every group
  everywhere: () => any;
}
// Defaults for collection sub instance objects, used as generics
export type GroupObj<DataType> = Record<GroupName, Group<DataType>>;
export type SelectorObj<DataType> = Record<SelectorName, Selector<DataType>>;
// Interface for the collection config object
export interface CollectionConfig {
  name?: string;
  primaryKey?: string | number; // defaults to 'id' (applied in the Collection constructor)
  defaultGroup?: boolean; // when true a 'default' group is created automatically
}
// Collection config can be given directly, or as a function of the collection
// instance that returns the config object
export type Config<DataType = DefaultDataItem> = CollectionConfig | ((collection: Collection<DataType>) => CollectionConfig);
// The collection class, should be created by the Pulse class for functioning types.
// Stores normalized data items keyed by primary key, plus Groups (ordered key
// lists) and Selectors (single-key pointers) derived from that data.
export class Collection<
  DataType extends DefaultDataItem = DefaultDataItem,
  G extends GroupObj<DataType> = GroupObj<DataType>,
  S extends SelectorObj<DataType> = SelectorObj<DataType>
> {
  public config: Required<CollectionConfig>;
  // collection data is stored here
  public data: { [key: string]: Data<DataType> } = {};
  public groups: G = {} as G;
  public selectors: S = {} as S;
  // Data instances handed out for keys requested before being collected;
  // migrated into `data` by saveData once the real item arrives.
  public _provisionalData: { [key: string]: Data<DataType> } = {};
  // Group instances created for names requested before being defined.
  public _provisionalGroups: { [key: string]: Group<DataType> } = {};
  // Optional transform applied on read (registered via compute()).
  public _computedFunc?: (data: DataType) => DataType;
  // Optional transform applied on write (registered via onCollect()).
  public _collectFunc?: (data: DataType) => DataType;
  // the amount of data items stored inside this collection
  public get size(): number {
    return Object.keys(this.data).length;
  }
  // a getter to return the default group value
  public get items(): DataType[] {
    const defaultGroup = this.groups?.default;
    if (!defaultGroup) return [];
    return defaultGroup.output;
  }
  // collection config can either be an object of type CollectionConfig or a function that returns CollectionConfig
  constructor(public instance: () => Pulse, config: Config<DataType>) {
    // Deduplicate registration: drop this exact instance and any previously
    // registered collection that shares the configured name, then register.
    if (this.instance()._collections.has(this)) this.instance()._collections.delete(this);
    this.instance()._collections.forEach(c => {
      if (c?.config?.name && config?.name) {
        if (c.config.name === config.name) this.instance()._collections.delete(c);
      }
    });
    this.instance()._collections.add(this);
    // if collection config is a function, execute and assign to config
    if (typeof config === 'function') config = config(this) as CollectionConfig;
    // assign defaults to config object ensuring type safety
    this.config = defineConfig<typeof config>(config, {
      primaryKey: 'id'
    }) as Required<typeof config>;
    if (this.config.defaultGroup) this.createGroup('default');
  }
  /**
   * Create a group instance under this collection
   * @param groupName - The name of the group you would like to create
   * @param initialIndex - An optional array of primary keys to initialize this groups with.
   */
  public createGroup<GN extends GroupName>(groupName: GN, initialIndex?: Array<PrimaryKey>) {
    // if (this.groups[groupName]) return this;
    const group = new Group<DataType>(() => this, initialIndex, { name: groupName });
    this.groups[groupName] = (group as unknown) as G[GN];
    //@ts-ignore - doesn't error in vscode, but errors at build
    return this as this & Collection<DataType, Record<GN, Group<DataType>>, S>;
  }
  /**
   * Create several group instances under this collection
   * @param groupNames - The names of the groups to create
   */
  public createGroups<GroupNames extends GroupName>(groupNames: [GroupNames, ...GroupNames[]]) {
    for (const name of groupNames) this.createGroup(name);
    //@ts-ignore - doesn't error in vscode, but errors at build
    return this as this & Collection<DataType, { [key in GroupNames]: Group<DataType> }, S>;
  }
  /**
   * Create a selector instance under this collection
   * @param selectorName - The name of the selector you would like to create
   * @param initialSelection - An initial PrimaryKey (string or number) to select.
   * Supports selecting data that does not yet exist, will update if that data item is eventually collected.
   */
  public createSelector<SN extends SelectorName>(selectorName: SN, initialSelection?: string | number) {
    const selector = new Selector<DataType>(() => this, initialSelection);
    this.selectors[selectorName] = (selector as unknown) as S[SN];
    //@ts-ignore - doesn't error in vscode, but errors at build
    return this as this & Collection<DataType, G, { [key in SN]: Selector<DataType> }>;
  }
  /**
   * Create several selector instances under this collection
   * @param selectorNames - An array of names to give new selectors
   * @returns The new Collection
   */
  public createSelectors<SelectorNames extends SelectorName>(selectorNames: [SelectorNames, ...SelectorNames[]]) {
    for (const name of selectorNames) this.createSelector(name);
    //@ts-ignore - doesn't error in vscode, but errors at build
    return this as this & Collection<DataType, G, { [key in SelectorNames]: Selector<DataType> }>;
  }
  // TODO: not implemented — currently a no-op that returns the collection.
  public model(config: (...args: any) => any, options: Record<string, any>) {
    return this;
  }
  // TODO: not implemented — currently a no-op that returns the collection.
  public persist(config: Record<string, string>) {
    return this;
  }
  // save data directly into collection storage
  // Returns the item's primary key, or null when the item is missing one.
  public saveData(data: DataType, patch?: boolean): PrimaryKey | null {
    let key = this.config.primaryKey;
    // reject items that do not carry their primary key
    if (!data || !data.hasOwnProperty(key)) return null;
    // apply the onCollect transform, if one was registered
    if (this._collectFunc) data = this._collectFunc(data);
    const existingData = this.data[data[key]];
    // if the data already exists and config is to patch, patch data
    if (patch && existingData) existingData.patch(data, { deep: false });
    // if already exists and no config, overwrite data
    else if (existingData) existingData.set(data);
    // if provisional data exists for this key, migrate data instance
    else if (this._provisionalData.hasOwnProperty(data[key])) {
      this.data[data[key]] = this._provisionalData[data[key]];
      // update provisional data instance with new data
      if (patch) {
        this.data[data[key]].patch(data, { deep: false });
      } else {
        this.data[data[key]].set(data);
      }
      // cleanup provisional data
      delete this._provisionalData[data[key]];
    }
    // otherwise create new data instance
    else this.data[data[key]] = new Data<DataType>(() => this, data);
    return data[key];
  }
  /**
   * Collect iterable data into this collection. Note:
   * - Data items must include a primary key (id)
   * @param data - Array of data, or single data object
   * @param groups - Array of group names or single group name
   */
  public collect(
    items: DataType | DataType[],
    groups?: GroupName | GroupName[],
    config: {
      patch?: boolean;
      method?: 'push' | 'unshift';
      forEachItem?: (item: DataType, key: PrimaryKey, index: number) => DataType;
    } = {}
  ): void {
    const _items = normalizeArray(items);
    groups = normalizeArray(groups);
    // is default group enabled? if so add default if not already present
    if (this.config.defaultGroup && groups.indexOf('default') === -1) groups.push('default');
    // if any of the groups don't already exist, create them
    for (let groupName of groups) !this.groups.hasOwnProperty(groupName) && this.createGroup(groupName);
    // if method is unshift reverse array order to maintain correct order
    if (config.method === 'unshift') _items.reverse();
    for (let [index, item] of _items.entries()) {
      if (config.forEachItem) item = config.forEachItem(item, item[this.config.primaryKey], index);
      let key = this.saveData(item, config.patch);
      // NOTE(review): an item without a primary key aborts the whole collect
      // here — groups touched by earlier items are never ingested; a
      // `continue` may have been intended. Confirm before changing.
      if (key === null) return;
      (groups as Array<string>).forEach(groupName => {
        let group = this.groups[groupName];
        if (!group.nextState.includes(key)) group.nextState[config.method || 'push'](key);
      });
    }
    // hand every touched group to the runtime so its output gets rebuilt
    for (let groupName of groups) this.instance().runtime.ingest(this.groups[groupName], this.groups[groupName].nextState);
  }
  /**
   * Return an item from this collection by primaryKey as Data instance (extends State)
   * @param primaryKey - The primary key of the data
   */
  public getData(id: PrimaryKey | State, options: { createProvisional: boolean } = { createProvisional: true }): Data<DataType> {
    if (id instanceof State) id = id.value;
    // unknown key: hand out (and cache) a provisional Data instance so callers
    // can subscribe before the item is actually collected
    if (!this.data.hasOwnProperty(id as PrimaryKey) && options.createProvisional) {
      if (this._provisionalData[id as PrimaryKey]) return this._provisionalData[id as PrimaryKey];
      const data = new Data(() => this, ({ id } as unknown) as DataType);
      this._provisionalData[id as PrimaryKey] = data;
      return data;
    }
    return this.data[id as PrimaryKey];
  }
  // Return the plain value for a key, run through the compute transform when
  // one is set; null when the item has not been collected.
  public getDataValue(id: PrimaryKey | State): DataType | null {
    let data = this.getData(id, { createProvisional: false })?.value;
    if (!data) return null;
    return this._computedFunc ? this._computedFunc(data) : data;
  }
  /**
   * Return an group from this collection as Group instance (extends State)
   * @param groupName - The name of your group
   */
  public getGroup(groupName: string): Group<DataType>;
  public getGroup(groupName: keyof G): Group<DataType>;
  public getGroup(groupName: keyof G | string): Group<DataType> {
    // @TEMP fixes but with provisional (because provisional sucks)
    // Unknown names get a provisional Group stored directly on `groups`.
    if (!this.groups[groupName])
      this.groups[groupName] = new Group<DataType>(() => this, [], { provisional: true, name: groupName as string }) as any;
    return this.groups[groupName];
    // // if provisional group exists return that
    // else if (this._provisionalGroups[groupName as GroupName]) return this._provisionalGroups[groupName as GroupName];
    // // if no group found create a provisional group
    // else return this.createProvisionalGroup(groupName as GroupName);
  }
  // Convenience: the group's computed output array.
  public getGroupValue(groupName: keyof G | string): DataType[] {
    return this.getGroup(groupName).output;
  }
  /**
   * Return a selector from this collection as Selector instance (extends State)
   * @param selectorName - The name of your selector
   */
  public getSelector(selectorName: keyof S): Selector<DataType> {
    return this.selectors[selectorName];
  }
  public getSelectorValue(selectorName: keyof S): DataType {
    return this.getSelector(selectorName).value;
  }
  // NOTE(review): only referenced from commented-out code in getGroup above —
  // possibly dead; confirm before removing.
  private createProvisionalGroup(groupName: GroupName) {
    const group = new Group<DataType>(() => this, [], { provisional: true, name: groupName as string });
    this._provisionalGroups[groupName] = group;
    return group;
  }
  // TODO: not implemented — always returns undefined despite the declared
  // DataType return type.
  public getDataValueByIndex(indexName: string, value: string): DataType {
    return;
  }
  /**
   * Update data by id in a Pulse Collection
   * @param updateKey - The primary key of the item to update
   * @param changes - This object will be deep merged with the original
   */
  public update(updateKey: PrimaryKey | State, changes: Partial<DataType> = {}, config: { deep?: boolean } = {}): State {
    // if State instance passed as updateKey grab the value
    if (updateKey instanceof State) updateKey = updateKey.value;
    updateKey = updateKey as PrimaryKey;
    // if the primary key is changed, this will be true
    let updateDataKey: boolean = false,
      // define aliases
      data = this.data[updateKey],
      primary = this.config.primaryKey;
    // if the data does not exist
    // NOTE(review): returns undefined here despite the declared `State` return type.
    if (!this.data.hasOwnProperty(updateKey)) return;
    // create a copy of the value for mutation
    const currentData = data.copy();
    // if the new object contains a primary key, it means we need to change the primary key
    // on the collection too, however we should defer this until after the new data is ingested into the runtime queue
    if (changes[primary]) updateDataKey = true;
    // deep merge the new data with the existing data
    const final = config.deep ? deepmerge(currentData, changes) : shallowmerge(currentData, changes);
    // assign the merged data to the next state of the State and ingest
    data.nextState = final;
    this.instance().runtime.ingest(data);
    // if the data key has changed move it internally and amend groups
    if (updateDataKey) this.updateDataKey(currentData[primary], final[primary]);
    this.rebuildGroupsThatInclude(final[primary]);
    // return the Data instance
    return this.data[final[primary]];
  }
  // Register a transform applied to every value read via getDataValue.
  public compute(func: (data: DataType) => DataType): void {
    this._computedFunc = func;
  }
  // Register a transform applied to every item written via saveData/collect.
  public onCollect(func: (data: DataType) => DataType): void {
    this._collectFunc = func;
  }
  /**
   * Add primary key(s) to group(s), creating any group that does not exist yet
   * @param primaryKeysOrKeys - The primary key array of keys of the item(s) to update
   * @param groupNameOrNames - Group name or array of names
   */
  public put(primaryKeysOrKeys: PrimaryKey | PrimaryKey[], groupNameOrNames: GroupName | GroupName[], options?: GroupAddOptions) {
    normalizeArray(groupNameOrNames).forEach(groupName => {
      if (!this.groups.hasOwnProperty(groupName)) this.createGroup(groupName);
      this.groups[groupName].add(primaryKeysOrKeys, options);
    });
  }
  /**
   * this is an alias function that returns other functions for removing data from a collection
   */
  public remove(primaryKeysOrKeys: PrimaryKey | PrimaryKey[]): RemoveOptions {
    const primaryKeys = normalizeArray(primaryKeysOrKeys);
    return {
      fromGroups: (groups: Array<string>) => this.removeFromGroups(primaryKeys, groups),
      everywhere: () => this.deleteData(primaryKeys, Object.keys(this.groups))
    };
  }
  // Remove keys from groups only; the underlying data is left untouched.
  public removeFromGroups(primaryKeyOrKeys: PrimaryKey | PrimaryKey[], groupNameOrNames: GroupName | GroupName[]): boolean {
    const primaryKeys = normalizeArray(primaryKeyOrKeys);
    const groupNames = normalizeArray(groupNameOrNames);
    groupNames.forEach(groupName => {
      if (!this.groups[groupName]) return;
      let group = this.getGroup(groupName);
      // this loop is bad, the group should be able to handle a remove action with many keys
      (primaryKeys as Array<PrimaryKey>).forEach(primaryKey => {
        group.remove(primaryKey);
      });
    });
    return true;
  }
  // Delete the stored data for each key and remove it from the given groups.
  public deleteData(primaryKeyOrKeys: PrimaryKey | PrimaryKey[], groupNameOrNames: GroupName | GroupName[]): boolean {
    const primaryKeys = normalizeArray(primaryKeyOrKeys);
    const groupNames = normalizeArray(groupNameOrNames);
    primaryKeys.forEach(key => {
      delete this.data[key];
      groupNames.forEach(groupName => this.groups[groupName].remove(key));
    });
    return true;
  }
  // Move a Data instance to a new primary key and patch every group index
  // that referenced the old key.
  private updateDataKey(oldKey: PrimaryKey, newKey: PrimaryKey): void {
    // create copy of data
    const dataCopy = this.data[oldKey];
    // delete old reference
    delete this.data[oldKey];
    // apply the data in storage
    this.data[newKey] = dataCopy;
    // update groups
    for (let groupName in this.groups) {
      const group = this.getGroup(groupName);
      // if group does not contain oldKey, continue.
      if (!group._value.includes(oldKey)) continue;
      // replace the primaryKey at current index
      group.nextState.splice(group.nextState.indexOf(oldKey), 1, newKey);
      // ingest the group
      this.instance().runtime.ingest(group);
    }
  }
  // True when a data item with this primary key has been collected.
  public has(primaryKey: PrimaryKey): boolean {
    return !!this.data.hasOwnProperty(primaryKey);
  }
  // Rebuild the output of every (regular and provisional) group for one key.
  public rebuildGroupsThatInclude(primaryKey: PrimaryKey): void {
    Object.values(this.groups).forEach(group => group.rebuildOne(primaryKey));
    if (Object.keys(this._provisionalGroups).length > 0) Object.values(this._provisionalGroups).forEach(group => group.rebuildOne(primaryKey));
  }
  // List the groups containing a key; the 'default' group can be excluded
  // via config.includeDefault = false.
  public getGroupsWith(primaryKey: PrimaryKey, config: { includeDefault?: boolean } = {}): Group[] {
    config = defineConfig(config, {
      includeDefault: true
    });
    const groups: Array<Group> = [];
    for (let key in this.groups) {
      const group = this.getGroup(key);
      if (group.has(primaryKey)) {
        if (!config.includeDefault && group.name === 'default') {
          continue;
        }
        groups.push(group);
      }
    }
    return groups;
  }
  // Same as getGroupsWith, but returns only the group names.
  public getGroupNamesWith(primaryKey: PrimaryKey) {
    return this.getGroupsWith(primaryKey).map(group => group.name);
  }
  // Clear all data and reset every group and selector to its initial state.
  public reset() {
    // reset data
    this.data = {};
    // reset groups
    const groups = Object.keys(this.groups);
    groups.forEach(groupName => this.groups[groupName].reset());
    //reset selectors
    const selectors = Object.keys(this.selectors);
    selectors.forEach(selectorName => this.selectors[selectorName].reset());
  }
  /**
   * @deprecated Please use Collection.getData
   */
  public findById(id: PrimaryKey | State): Data<DataType> {
    return this.getData(id);
  }
  /**
   * @deprecated Please use Collection.getDataValue
   */
  public getValueById(id: PrimaryKey | State): DataType | null {
    return this.getDataValue(id);
  }
}
export default Collection;
import { Duration, Timezone } from "chronoshift";
import { List } from "immutable";
import {
$,
AttributeInfo,
AttributeJSs,
Attributes,
CustomDruidAggregations,
CustomDruidTransforms,
Executor,
Expression,
ExpressionJS,
External,
RefExpression
} from "plywood";
import { quoteNames, verifyUrlSafeName } from "../../utils/general/general";
import { Cluster } from "../cluster/cluster";
import { Dimension, DimensionKind, timeDimension } from "../dimension/dimension";
import {
allDimensions,
ClientDimensions,
DimensionOrGroupJS,
Dimensions,
findDimensionByExpression,
findDimensionByName,
fromConfig as dimensionsFromConfig,
prepend,
serialize as dimensionsSerialize,
SerializedDimensions
} from "../dimension/dimensions";
import { RelativeTimeFilterClause, TimeFilterPeriod } from "../filter-clause/filter-clause";
import { EMPTY_FILTER, Filter, FilterJS } from "../filter/filter";
import {
allMeasures,
findMeasureByName,
fromConfig as measuresFromConfig,
hasMeasureWithName,
MeasureOrGroupJS,
Measures,
serialize as measuresSerialize, SerializedMeasures
} from "../measure/measures";
import { QueryDecoratorDefinition, QueryDecoratorDefinitionJS } from "../query-decorator/query-decorator";
import { RefreshRule, RefreshRuleJS } from "../refresh-rule/refresh-rule";
import { SeriesList } from "../series-list/series-list";
import { EMPTY_SPLITS, Splits } from "../splits/splits";
import { Timekeeper } from "../timekeeper/timekeeper";
import { attachExternalExecutor, QueryableDataCube } from "./queryable-data-cube";
// Introspection strategy used when a cube does not specify one.
export const DEFAULT_INTROSPECTION: Introspection = "autofill-all";
// All legal values for DataCubeJS.introspection (validated in readIntrospection).
const INTROSPECTION_VALUES = new Set(["none", "no-autofill", "autofill-dimensions-only", "autofill-measures-only", "autofill-all"]);
// Fallbacks applied by fromConfig / getDefaultFilter when the corresponding
// config field is absent.
export const DEFAULT_DEFAULT_TIMEZONE = Timezone.UTC;
const DEFAULT_DEFAULT_FILTER = EMPTY_FILTER;
const DEFAULT_DEFAULT_SPLITS = EMPTY_SPLITS;
export const DEFAULT_DEFAULT_DURATION = Duration.fromJS("P1D");
export const DEFAULT_MAX_SPLITS = 3;
export const DEFAULT_MAX_QUERIES = 500;
// Throws when any name is used by both a dimension and a measure of the cube;
// no-op when either side is missing.
function checkDimensionsAndMeasuresNamesUniqueness(dimensions: Dimensions, measures: Measures, dataCubeName: string) {
  if (dimensions == null || measures == null) return;
  const allNames = List(Object.keys(measures.byName)).concat(Object.keys(dimensions.byName));
  const duplicateNames = allNames
    .groupBy(name => name)
    .filter(names => names.count() > 1)
    .map((names, name) => name)
    .toList();
  if (duplicateNames.size > 0) {
    throw new Error(`data cube: '${dataCubeName}', names: ${quoteNames(duplicateNames)} found in both dimensions and measures'`);
  }
}
// Introspection strategy: controls which column kinds are auto-filled from the
// cluster schema (see readIntrospection and DEFAULT_INTROSPECTION).
export type Introspection =
  "none"
  | "no-autofill"
  | "autofill-dimensions-only"
  | "autofill-measures-only"
  | "autofill-all";
// A cube's backing source: one datasource name or a union of several.
export type Source = string | string[];
// Fully resolved server-side data cube model (the output of fromConfig).
export interface DataCube {
  name: string;
  title: string;
  description: string;
  extendedDescription?: string;
  clusterName: string;
  source: Source;
  group?: string;
  subsetExpression: Expression;
  rollup: boolean;
  options: DataCubeOptions;
  introspection?: Introspection;
  attributeOverrides: Attributes;
  attributes: Attributes;
  derivedAttributes: Record<string, Expression>;
  dimensions: Dimensions;
  measures: Measures;
  timeAttribute?: RefExpression;
  defaultTimezone: Timezone;
  defaultFilter?: Filter;
  defaultSplitDimensions: string[];
  defaultDuration: Duration;
  defaultSortMeasure?: string;
  defaultSelectedMeasures: string[];
  defaultPinnedDimensions: string[];
  refreshRule: RefreshRule;
  maxSplits: number;
  maxQueries: number;
  queryDecorator?: QueryDecoratorDefinition;
  cluster?: Cluster;
}
// Raw cube definition as written in the config file; optional fields receive
// defaults in fromConfig.
export interface DataCubeJS {
  name: string;
  title?: string;
  description?: string;
  extendedDescription?: string;
  clusterName: string;
  source: Source;
  group?: string;
  subsetFormula?: string;
  rollup?: boolean;
  options?: DataCubeOptions;
  introspection?: Introspection;
  attributeOverrides?: AttributeJSs;
  attributes?: AttributeJSs;
  derivedAttributes?: Record<string, ExpressionJS>;
  dimensions?: DimensionOrGroupJS[];
  measures?: MeasureOrGroupJS[];
  timeAttribute?: string;
  defaultTimezone?: string;
  defaultFilter?: FilterJS;
  defaultSplitDimensions?: string[];
  defaultDuration?: string;
  defaultSortMeasure?: string;
  defaultSelectedMeasures?: string[];
  defaultPinnedDimensions?: string[];
  refreshRule?: RefreshRuleJS;
  maxSplits?: number;
  maxQueries?: number;
  queryDecorator?: QueryDecoratorDefinitionJS;
}
// JSON-safe cube representation produced by serialize() for the client.
export interface SerializedDataCube {
  name: string;
  title: string;
  description: string;
  extendedDescription?: string;
  clusterName: string;
  source: Source;
  group: string;
  rollup: boolean;
  options: DataCubeOptions;
  attributes: AttributeJSs;
  dimensions: SerializedDimensions;
  measures: SerializedMeasures;
  timeAttribute?: string;
  defaultTimezone: string;
  defaultFilter?: FilterJS;
  defaultSplitDimensions?: string[];
  defaultDuration: string;
  defaultSortMeasure?: string;
  defaultSelectedMeasures: string[];
  defaultPinnedDimensions: string[];
  refreshRule: RefreshRuleJS;
  maxSplits: number;
}
// Cube model as used client-side, with an attached query executor.
export interface ClientDataCube {
  name: string;
  title: string;
  description: string;
  extendedDescription?: string;
  clusterName: string;
  source: Source;
  group?: string;
  rollup: boolean;
  options: DataCubeOptions;
  attributes: Attributes;
  dimensions: ClientDimensions;
  measures: Measures;
  timeAttribute?: string;
  defaultTimezone: Timezone;
  defaultFilter?: Filter;
  defaultSplitDimensions?: string[];
  defaultDuration: Duration;
  defaultSortMeasure?: string;
  defaultSelectedMeasures: string[];
  defaultPinnedDimensions: string[];
  refreshRule: RefreshRule;
  maxSplits: number;
  executor: Executor;
}
/**
 * Split a cube description into a short description and an optional extended
 * part. An explicit `extendedDescription` wins; otherwise a single
 * `description` field may embed the extended part after a "---" line.
 */
function readDescription({
  description,
  extendedDescription
}: DataCubeJS): { description: string, extendedDescription?: string } {
  if (!description) {
    return { description: "" };
  }
  if (extendedDescription) {
    return { description, extendedDescription };
  }
  const segments = description.split(/\n---\n/);
  // String.split always returns at least one element, so test for exactly one
  // (no separator found) — comparing against 0 was dead code and made plain
  // descriptions carry an empty-string extendedDescription.
  if (segments.length === 1) {
    return { description };
  }
  return {
    description: segments[0],
    extendedDescription: segments.splice(1).join("\n---\n ")
  };
}
// Legacy config field: `subsetFilter` was superseded by `subsetFormula`
// (fromConfig accepts either).
interface LegacyDataCubeJS {
  subsetFilter?: string;
}
// Validate the configured introspection strategy, falling back to the default.
function readIntrospection(config: DataCubeJS): Introspection {
  const introspection = config.introspection || DEFAULT_INTROSPECTION;
  if (INTROSPECTION_VALUES.has(introspection)) return introspection;
  throw new Error(`invalid introspection value ${introspection}, must be one of ${[...INTROSPECTION_VALUES].join(", ")}`);
}
// Read and validate the mandatory, URL-safe cube name.
function readName({ name }: DataCubeJS): string {
  if (!name) throw new Error("DataCube must have a name");
  verifyUrlSafeName(name);
  return name;
}
// When a cluster is supplied, its name must match the config's clusterName.
function verifyCluster(config: DataCubeJS, cluster?: Cluster) {
  if (cluster === undefined) return;
  if (config.clusterName === cluster.name) return;
  throw new Error(`Cluster name '${config.clusterName}' was given but '${cluster.name}' cluster was supplied (must match)`);
}
// Parse attribute lists and derived-attribute expressions from the raw config.
function readAttributes(config: DataCubeJS): Pick<DataCube, "attributes" | "attributeOverrides" | "derivedAttributes"> {
  const attributeOverrides = AttributeInfo.fromJSs(config.attributeOverrides || []);
  const attributes = AttributeInfo.fromJSs(config.attributes || []);
  let derivedAttributes: Record<string, Expression> = {};
  if (config.derivedAttributes) {
    derivedAttributes = Expression.expressionLookupFromJS(config.derivedAttributes);
  }
  return { attributes, attributeOverrides, derivedAttributes };
}
// Resolve the time attribute reference: an explicit config value wins;
// Druid clusters fall back to the built-in "__time" column.
function readTimeAttribute(config: DataCubeJS, cluster?: Cluster): RefExpression | undefined {
  if (config.timeAttribute) return $(config.timeAttribute);
  if (cluster && cluster.type === "druid") return $("__time");
  return undefined;
}
// Build dimensions from config, prepending a synthetic time dimension when the
// cube has a time attribute that no configured dimension already references.
function readDimensions(config: DataCubeJS, timeAttribute?: RefExpression): Dimensions {
  const dimensions = dimensionsFromConfig(config.dimensions || []);
  const needsTimeDimension = timeAttribute && findDimensionByExpression(dimensions, timeAttribute) === null;
  return needsTimeDimension ? prepend(timeDimension(timeAttribute), dimensions) : dimensions;
}
// Parse dimensions and measures together, enforcing name uniqueness across both.
function readColumns(config: DataCubeJS, timeAttribute: RefExpression): { dimensions: Dimensions, measures: Measures } {
  const { name } = config;
  try {
    const dimensions = readDimensions(config, timeAttribute);
    const measures = measuresFromConfig(config.measures || []);
    checkDimensionsAndMeasuresNamesUniqueness(dimensions, measures, name);
    return { dimensions, measures };
  } catch (e) {
    // Re-throw with the cube name prefixed for easier diagnosis.
    throw new Error(`data cube: '${name}', ${e.message}`);
  }
}
// When a defaultSortMeasure is configured, it must name an existing measure.
function verifyDefaultSortMeasure(config: DataCubeJS, measures: Measures) {
  const { defaultSortMeasure, name } = config;
  if (!defaultSortMeasure) return;
  if (!hasMeasureWithName(measures, defaultSortMeasure)) {
    throw new Error(`Can not find defaultSortMeasure '${defaultSortMeasure}' in data cube '${name}'`);
  }
}
// Parse the configured default filter, translating parse failures into a
// descriptive error.
function readDefaultFilter(config: DataCubeJS): Filter | undefined {
  const { defaultFilter } = config;
  if (!defaultFilter) return undefined;
  try {
    return Filter.fromJS(defaultFilter);
  } catch {
    throw new Error(`Incorrect format of default filter for ${config.name}. Ignoring field`);
  }
}
/**
 * Build a fully resolved DataCube from its raw JSON config: validates name,
 * cluster and columns, applies all defaults, and parses nested structures.
 * Throws on invalid config.
 */
export function fromConfig(config: DataCubeJS & LegacyDataCubeJS, cluster?: Cluster): DataCube {
  const name = readName(config);
  const introspection = readIntrospection(config);
  verifyCluster(config, cluster);
  const { attributes, attributeOverrides, derivedAttributes } = readAttributes(config);
  const refreshRule = config.refreshRule ? RefreshRule.fromJS(config.refreshRule) : RefreshRule.query();
  const timeAttribute = readTimeAttribute(config, cluster);
  const { dimensions, measures } = readColumns(config, timeAttribute);
  verifyDefaultSortMeasure(config, measures);
  // `subsetFilter` is the legacy name for `subsetFormula`
  const subsetFormula = config.subsetFormula || config.subsetFilter;
  const defaultFilter = readDefaultFilter(config);
  const { description, extendedDescription } = readDescription(config);
  return {
    name,
    title: config.title || config.name,
    description,
    extendedDescription,
    clusterName: config.clusterName || "druid",
    source: config.source || config.name,
    group: config.group || null,
    subsetExpression: subsetFormula ? Expression.fromJSLoose(subsetFormula) : Expression.TRUE,
    rollup: Boolean(config.rollup),
    options: config.options || {},
    introspection,
    attributeOverrides,
    attributes,
    derivedAttributes,
    dimensions,
    measures,
    timeAttribute,
    defaultFilter,
    defaultTimezone: config.defaultTimezone ? Timezone.fromJS(config.defaultTimezone) : DEFAULT_DEFAULT_TIMEZONE,
    defaultSplitDimensions: config.defaultSplitDimensions || [],
    defaultDuration: config.defaultDuration ? Duration.fromJS(config.defaultDuration) : DEFAULT_DEFAULT_DURATION,
    defaultSortMeasure: getDefaultSortMeasure(config, measures),
    defaultSelectedMeasures: config.defaultSelectedMeasures || [],
    defaultPinnedDimensions: config.defaultPinnedDimensions || [],
    maxSplits: config.maxSplits || DEFAULT_MAX_SPLITS,
    maxQueries: config.maxQueries || DEFAULT_MAX_QUERIES,
    queryDecorator: config.queryDecorator ? QueryDecoratorDefinition.fromJS(config.queryDecorator) : null,
    refreshRule,
    cluster
  };
}
/**
 * Convert a server-side DataCube into its JSON-safe SerializedDataCube form.
 * Optional members (defaultDuration, defaultFilter, timeAttribute) are only
 * serialized when present.
 */
export function serialize(dataCube: DataCube): SerializedDataCube {
  const {
    attributes,
    clusterName,
    defaultDuration,
    defaultFilter,
    defaultPinnedDimensions,
    defaultSelectedMeasures,
    defaultSortMeasure,
    defaultSplitDimensions,
    defaultTimezone,
    description,
    dimensions,
    extendedDescription,
    group,
    maxSplits,
    measures,
    name,
    options,
    refreshRule,
    rollup,
    source,
    timeAttribute,
    title
  } = dataCube;
  return {
    attributes: attributes.map(a => a.toJS()),
    clusterName,
    defaultDuration: defaultDuration && defaultDuration.toJS(),
    defaultFilter: defaultFilter && defaultFilter.toJS(),
    defaultPinnedDimensions,
    defaultSelectedMeasures,
    defaultSortMeasure,
    defaultSplitDimensions,
    defaultTimezone: defaultTimezone.toJS(),
    description,
    dimensions: dimensionsSerialize(dimensions),
    extendedDescription,
    group,
    maxSplits,
    measures: measuresSerialize(measures),
    name,
    options,
    refreshRule: refreshRule.toJS(),
    rollup,
    source,
    // timeAttribute is optional on DataCube — guard it like the other optional
    // fields above instead of crashing on `.name` of undefined.
    timeAttribute: timeAttribute && timeAttribute.name,
    title
  };
}
// Pass-through Druid options: custom aggregations/transforms plus an extra
// query context object.
export interface DataCubeOptions {
  customAggregations?: CustomDruidAggregations;
  customTransforms?: CustomDruidTransforms;
  druidContext?: Record<string, unknown>;
}
export function fromClusterAndExternal(name: string, cluster: Cluster, external: External): QueryableDataCube {
const dataCube = fromConfig({
name,
clusterName: cluster.name,
source: String(external.source),
refreshRule: RefreshRule.query().toJS()
}, cluster);
return attachExternalExecutor(dataCube, external);
}
/**
 * Compute the effective "max time" of a cube according to its refresh rule:
 * realtime → now, fixed → the configured instant, query → the timekeeper's
 * recorded time for this cube.
 */
export function getMaxTime({ name, refreshRule }: ClientDataCube, timekeeper: Timekeeper): Date {
  if (refreshRule.isRealtime()) return timekeeper.now();
  if (refreshRule.isFixed()) return refreshRule.time;
  // refreshRule is query
  return timekeeper.getTime(name);
}
export function getDimensionsByKind(dataCube: { dimensions: Dimensions }, kind: DimensionKind): Dimension[] {
return allDimensions(dataCube.dimensions).filter(d => d.kind === kind);
}
// True when the expression is a direct reference to the cube's time attribute.
export function isTimeAttribute(dataCube: ClientDataCube, ex: Expression) {
  if (!(ex instanceof RefExpression)) return false;
  return ex.name === dataCube.timeAttribute;
}
// Resolve the cube's starting filter. When the cube has a time attribute, a
// LATEST-period clause over the default duration is prepended to it.
export function getDefaultFilter(dataCube: ClientDataCube): Filter {
  const base = dataCube.defaultFilter || DEFAULT_DEFAULT_FILTER;
  if (!dataCube.timeAttribute) return base;
  const latestClause = new RelativeTimeFilterClause({
    period: TimeFilterPeriod.LATEST,
    duration: dataCube.defaultDuration,
    reference: dataCube.timeAttribute
  });
  return base.insertByIndex(0, latestClause);
}
// Resolve the cube's starting splits from the configured dimension names.
// NOTE(review): an empty array is truthy, so the DEFAULT_DEFAULT_SPLITS
// fallback only applies when the property is missing entirely — confirm that
// is intended.
export function getDefaultSplits(dataCube: ClientDataCube): Splits {
  const { defaultSplitDimensions } = dataCube;
  if (!defaultSplitDimensions) return DEFAULT_DEFAULT_SPLITS;
  const dimensions = defaultSplitDimensions.map(name => findDimensionByName(dataCube.dimensions, name));
  return Splits.fromDimensions(dimensions);
}
export function getDefaultSeries(dataCube: ClientDataCube): SeriesList {
if (dataCube.defaultSelectedMeasures) {
return SeriesList.fromMeasures(dataCube.defaultSelectedMeasures.map(name => findMeasureByName(dataCube.measures, name)));
}
const first4Measures = allMeasures(dataCube.measures).slice(0, 4);
return SeriesList.fromMeasures(first4Measures);
}
export function getDefaultSortMeasure(dataCube: { defaultSortMeasure?: string }, measures: Measures): string | undefined {
if (dataCube.defaultSortMeasure) return dataCube.defaultSortMeasure;
const firstMeasure = allMeasures(measures)[0];
if (firstMeasure) return firstMeasure.name;
return undefined;
} | the_stack |
import { MarkdownIt, RuleBlock, RuleInline, Renderer, Token } from 'markdown-it';
import { renderTabularInline } from "./md-renderer-rules/render-tabular";
// import { escapeHtml } from 'markdown-it/lib/common/utils';
// Wrap abstract body content in the centered "Abstract" heading markup.
// Annotated `content: string` — the parameter was implicitly `any`.
const getAbstractTemplate = (content: string) => `<h4 style="text-align: center">Abstract</h4><p style="text-indent: 1em">${content}</p>`;
// Module-level numbering state shared by the heading rules below. It persists
// between parses, so the reset rules must run between documents.
let subsectionParentCount: number = 0; // count of \section headings seen so far
let sectionCount: number = 0;
let subCount: number = 0;
let subSubCount: number = 0;
let isNewSect: boolean = false; // set when a new \section heading begins
let isNewSubSection: boolean = false; // set when a new \subsection heading begins
// markdown-it inline rule: reset only the subsection parent counter.
export const resetCounter: RuleInline = () => {
  subsectionParentCount = 0;
};
// markdown-it inline rule: reset every heading counter back to zero.
export const resetTextCounter: RuleInline = () => {
  subsectionParentCount = 0;
  sectionCount = 0;
  subCount = 0;
  subSubCount = 0;
};
// Block rule for LaTeX-style headings: \title{...}, \section{...},
// \subsection{...} and \subsubsection{...}. The brace content may continue
// over several lines until the balancing "}" is found; literal "\\" sequences
// become newlines. Emits heading_open / inline / heading_close tokens and
// stashes numbering info on the inline token for the renderers.
const headingSection: RuleBlock = (state, startLine: number, endLine: number) => {
// NOTE(review): these render-time counters are reset every time this rule is
// probed, even when it bails out below — confirm that is intended.
sectionCount = 0;
subCount = 0;
subSubCount = 0;
let token: Token, lineText: string,
pos: number = state.bMarks[startLine] + state.tShift[startLine],
max: number = state.eMarks[startLine];
let nextLine: number = startLine + 1;
let startPos: number = 0, type: string, className: string = '',
is_numerable: boolean = false,
beginMarker: string = "{",
level = 1;
lineText = state.src.slice(pos, max).trim();
// a heading command must start with a backslash
if (state.src.charCodeAt(pos) !== 0x5c /* \ */) {
return false;
}
const match: RegExpMatchArray = lineText
.slice(++startPos)
.match(/^(?:title|section|subsection|subsubsection)/);
if (!match) {
return false;
}
let attrStyle = '';
startPos += match[0].length;
// map the command name to heading level, token type, CSS class and
// (docx-only) inline style; \section also advances the parse-time counter
switch (match[0]) {
case "title":
level = 1;
type = "title";
className = "main-title";
attrStyle = 'text-align: center; margin: 0 auto; line-height: 1.2; margin-bottom: 1em;';
break;
case "section":
level = 2;
type = "section";
is_numerable = true;
subsectionParentCount++;
isNewSect = true;
className = "section-title";
attrStyle = 'margin-top: 1.5em;';
break;
case "subsection":
isNewSubSection = true;
level = 3;
type = "subsection";
className = "sub_section-title";
break;
case "subsubsection":
level = 4;
type = "subsubsection";
className = "sub_sub_section-title";
break;
default:
break;
}
// the command must be followed immediately by "{"
if (lineText[startPos] !== beginMarker) {
return false;
}
// first attempt: the closing "}" is on the same line
let { res = false, content = '' } = findEndMarker(lineText, startPos);
let resString = content;
resString = resString.split('\\\\').join('\n');
let hasEndMarker = false;
let last = nextLine;
if (!res) {
// multi-line heading: accumulate subsequent lines until one containing the
// balancing "}" (findEndMarker in onlyEnd mode) or an empty line
for (; nextLine <= endLine; nextLine++) {
if (lineText === '') {
break;
}
pos = state.bMarks[nextLine] + state.tShift[nextLine];
max = state.eMarks[nextLine];
lineText = state.src.slice(pos, max);
let { res = false, content = '' } = findEndMarker(lineText, -1, "{", "}", true);
if (res) {
resString += resString ? ' ' : '';
content = content.split('\\\\').join('\n');
resString += content;
hasEndMarker = true;
break
}
resString += resString ? ' ' : '';
lineText = lineText.split('\\\\').join('\n');
resString += lineText;
}
last = nextLine + 1;
} else {
hasEndMarker = true;
last = nextLine;
}
// unterminated command: let other rules handle these lines
if ( !hasEndMarker ) {
return false;
}
// consume the parsed lines and emit the heading token triple
state.line = last;
token = state.push('heading_open', 'h' + String(level), 1);
if (state.md.options.forLatex) {
token.latex = type;
}
token.markup = '########'.slice(0, level);
token.map = [startLine, state.line];
token.attrJoin('type', type);
token.attrJoin('class', className);
if (state.md.options?.forDocx && attrStyle) {
token.attrSet('style', attrStyle);
}
token = state.push('inline', '', 0);
token.content = resString;
token.type = type;
token.is_numerable = is_numerable;
token.map = [startLine, state.line];
// re-parse the heading text so nested inline markup still renders
let children = [];
state.md.inline.parse(token.content.trim(), state.md, state.env, children);
token.children = children;
// sub(sub)section tokens remember their parent section number plus whether a
// new (sub)section started since the last one, so renderers restart counters
if (type === "subsection") {
token.secNumber = subsectionParentCount;
token.isNewSect = isNewSect;
isNewSect = false
}
if (type === "subsubsection") {
token.secNumber = subsectionParentCount;
token.isNewSubSection = isNewSubSection;
isNewSubSection = false
}
token = state.push('heading_close', 'h' + String(level), -1);
token.markup = '########'.slice(0, level);
if (state.md.options.forLatex) {
token.latex = type;
}
return true;
};
// Block rule for \begin{abstract} ... \end{abstract}. Collects the lines
// between the two markers, drops blank lines, and emits a styled <div> whose
// single inline token carries pre-built HTML from getAbstractTemplate (one
// <p> per non-empty chunk). Fails (returns false) if no \end{abstract} is
// found before the paragraph terminates.
const abstractBlock: RuleBlock = (state, startLine) => {
let isBlockOpened = false;
let token: Token;
let content: string;
let terminate: boolean;
const openTag: RegExp = /\\begin{abstract}/;
const closeTag: RegExp = /\\end{abstract}/;
let pos: number = state.bMarks[startLine] + state.tShift[startLine];
let max: number = state.eMarks[startLine];
let nextLine: number = startLine + 1;
const endLine: number = state.lineMax;
const terminatorRules = state.md.block.ruler.getRules('paragraph');
let lineText: string = state.src.slice(pos, max);
let isCloseTagExist = false;
// the first line must contain \begin{abstract}
if (!openTag.test(lineText)) {
return false;
}
let resString = '';
let abs = openTag.test(lineText);
for (; nextLine < endLine; nextLine++) {
if (closeTag.test(lineText)) {
// NOTE(review): appends the two characters "\n" (not a newline) and
// lineText is never read after this break — looks vestigial; confirm.
lineText += '\\n';
break;
}
isBlockOpened = true;
if (lineText === '') {
resString += '\n'
}
pos = state.bMarks[nextLine] + state.tShift[nextLine];
max = state.eMarks[nextLine];
lineText = state.src.slice(pos, max);
if (abs) {
if (closeTag.test(lineText)) {
// closing marker found: stop collecting content lines
isBlockOpened = false;
abs = false;
isCloseTagExist = true;
} else {
resString += resString ? ' ' : '';
resString += lineText;
}
} else {
if (state.isEmpty(nextLine)) { break }
}
// a nested \begin{abstract} while the block is still open is malformed
if (openTag.test(lineText)) {
if (isBlockOpened) {
return false;
}
}
// this would be a code block normally, but after paragraph
// it's considered a lazy continuation regardless of what's there
if (state.sCount[nextLine] - state.blkIndent > 3) { continue; }
// quirk for blockquotes, this line should already be checked by that rule
if (state.sCount[nextLine] < 0) { continue; }
// Some tags can terminate paragraph without empty line.
terminate = false;
for (let i = 0, l = terminatorRules.length; i < l; i++) {
if (terminatorRules[i](state, nextLine, endLine, true)) {
terminate = true;
break;
}
}
if (terminate) { break; }
}
if (!isCloseTagExist) {
return false;
}
// keep only non-blank chunks; each becomes its own <p> in the template
content = resString;
const contentList = content.split('\n');
const tokenContent = contentList.filter(item => {
return item.trim().length > 0
});
// emit <div class="abstract"> wrapping the templated HTML
token = state.push('paragraph_open', 'div', 1);
token.map = [startLine, nextLine];
token.attrSet('class', 'abstract');
token.attrSet('style', 'width: 80%; margin: 0 auto; margin-bottom: 1em; font-size: .9em;');
token = state.push('inline', '', 0);
token.content = getAbstractTemplate(tokenContent.join(`</p><p style="text-indent: 1em">`));
token.map = [startLine, state.line];
token.children = [];
token = state.push('paragraph_close', 'div', -1);
state.line = nextLine;
return true;
};
/**
 * Scan `str` for the `endMarker` that balances the `beginMarker` expected at
 * `startPos`, ignoring markers that appear inside inline code (backtick
 * spans). With `onlyEnd` the begin-marker check is skipped and the scan just
 * looks for one unmatched `endMarker` (used for continuation lines).
 *
 * Returns `{ res: false }` when `str[startPos]` is not the begin marker,
 * `{ res: false, content }` when the marker is never balanced, and
 * `{ res: true, content, nextPos }` on success, where `content` is the text
 * between the markers and `nextPos` is the index just past the end marker.
 */
const findEndMarker = (str: string, startPos: number = 0, beginMarker: string = "{", endMarker: string = "}", onlyEnd = false) => {
  if (!onlyEnd && str[startPos] !== beginMarker) {
    return { res: false };
  }
  let depth = 1;        // bracket nesting; the marker at startPos counts as level 1
  let codeTicks = 0;    // non-zero while inside a backtick code span
  let collected = '';
  let lastIndex = 0;
  for (let idx = startPos + 1; idx < str.length; idx++) {
    const ch = str[idx];
    lastIndex = idx;
    if (ch === '`') {
      codeTicks = codeTicks > 0 ? codeTicks - 1 : codeTicks + 1;
    }
    if (codeTicks === 0 && ch === beginMarker) {
      depth++;
      collected += ch;
      continue;
    }
    if (codeTicks === 0 && ch === endMarker) {
      depth--;
      if (depth === 0) {
        break; // found the balancing marker
      }
      collected += ch;
      continue;
    }
    collected += ch;
  }
  if (depth > 0) {
    // ran off the end of the string without balancing
    return {
      res: false,
      content: collected
    };
  }
  return {
    res: true,
    content: collected,
    nextPos: lastIndex + endMarker.length
  };
};
/**
 * Inline rule for `\author{...}`. The payload is split on `\and` into author
 * columns and each column on `\\` into lines; every line is inline-parsed so
 * nested markup still renders. Emits a single `author` token whose children
 * are `author_column` tokens containing `author_item` tokens.
 */
const textAuthor: RuleInline = (state) => {
  let scanPos = state.pos;
  // the command must start with a backslash
  if (state.src.charCodeAt(scanPos) !== 0x5c /* \ */) {
    return false;
  }
  const match = state.src
    .slice(++scanPos)
    .match(/^(?:author)/); // eslint-disable-line
  if (!match) {
    return false;
  }
  scanPos += match[0].length;
  const { res = false, content = '', nextPos = 0 } = findEndMarker(state.src, scanPos);
  if (!res) {
    return false;
  }
  const type = "author";
  const arrtStyle = 'text-align: center; margin: 0 auto; display: flex; justify-content: center; flex-wrap: wrap;';
  const token = state.push(type, "", 0);
  if (state.md.options?.forDocx && arrtStyle) {
    token.attrSet('style', arrtStyle);
  }
  token.content = content;
  token.children = [];
  for (const rawColumn of content.split('\\and')) {
    const column = rawColumn ? rawColumn.trim() : '';
    const columnToken: Token = {};
    columnToken.type = 'author_column';
    columnToken.content = column;
    columnToken.children = [];
    for (const rawItem of column.split('\\\\')) {
      // trim the item, then collapse its internal newlines to single spaces
      const trimmed = rawItem ? rawItem.trim() : '';
      const item = trimmed
        .split('\n')
        .map(part => part.trim())
        .join(' ');
      const itemToken: Token = {};
      itemToken.type = 'author_item';
      itemToken.content = item;
      const children = [];
      state.md.inline.parse(item, state.md, state.env, children);
      itemToken.children = children;
      columnToken.children.push(itemToken);
    }
    token.children.push(columnToken);
  }
  state.pos = nextPos;
  return true;
};
/**
 * Inline rule for `\textit{...}`, `\textbf{...}` and `\texttt{...}`.
 * Emits `<name>_open` / `<name>` / `<name>_close` tokens; the middle token
 * carries the brace content, re-parsed as inline markdown.
 */
const textTypes: RuleInline = (state) => {
  let scanPos = state.pos;
  if (state.src.charCodeAt(scanPos) !== 0x5c /* \ */) {
    return false;
  }
  const match = state.src
    .slice(++scanPos)
    .match(/^(?:textit|textbf|texttt)/); // eslint-disable-line
  if (!match) {
    return false;
  }
  scanPos += match[0].length;
  // the matched command name doubles as the token type
  const type: string = match[0];
  const arrtStyle: string = '';
  if (!type || type === '') {
    return false;
  }
  const { res = false, content = '', nextPos = 0 } = findEndMarker(state.src, scanPos);
  if (!res) {
    return false;
  }
  state.push(type + '_open', "", 0);
  const token = state.push(type, "", 0);
  if (state.md.options?.forDocx && arrtStyle) {
    token.attrSet('style', arrtStyle);
  }
  token.content = content;
  // re-parse the payload so nested inline markup still renders
  const children = [];
  state.md.inline.parse(content.trim(), state.md, state.env, children);
  token.children = children;
  state.push(type + '_close', "", 0);
  state.pos = nextPos;
  return true;
};
// Inline rule for `\url{...}`. Empty payloads are swallowed, non-URL payloads
// become plain `textUrl` tokens, and URL-bearing payloads are split by
// md.linkify into link_open/text/link_close triples with the surrounding
// text emitted as `textUrl` tokens.
const linkifyURL: RuleInline = (state) => {
const urlTag: RegExp = /(?:(www|http:|https:)+[^\s]+[\w])/;
let startPos = state.pos;
let
beginMarker: string = "{",
endMarker: string = "}";
// the command must start with a backslash
if (state.src.charCodeAt(startPos) !== 0x5c /* \ */) {
return false;
}
const match = state.src
.slice(++startPos)
.match(/^(?:url)/); // eslint-disable-line
if (!match) {
return false;
}
startPos += match[0].length;
if (state.src[startPos] !== beginMarker) {
return false;
}
// NOTE(review): takes the FIRST "}" after the opening brace (no nesting /
// backtick handling like findEndMarker) — confirm nested braces cannot occur.
const endMarkerPos = state.src.indexOf(endMarker, startPos);
if (endMarkerPos === -1) {
return false;
}
const nextPos = endMarkerPos + endMarker.length;
let token;
const text = state.src.slice(startPos + 1, nextPos - endMarker.length);
// empty \url{} — consume it and emit nothing
if (!text || text.trim().length === 0) {
state.pos = nextPos;
return true;
}
// payload is not a recognizable URL: keep it as plain textUrl
if (!state.md.linkify.test(text) || !urlTag.test(text)) {
token = state.push('textUrl', '', 0);
token.content = text;
state.pos = nextPos;
return true;
}
const links = state.md.linkify.match(text);
let level = 1;
let lastPos = 0;
let pos;
// NOTE(review): permanently disables the markdown-it "linkify" option on the
// whole md instance the first time this branch runs — confirm this global
// side effect is intended.
state.md.options.linkify = false
for (let ln = 0; ln < links.length; ln++) {
const url = links[ln].url;
const fullUrl = state.md.normalizeLink(url);
if (!state.md.validateLink(fullUrl)) { continue; }
let urlText = links[ln].text;
// matches that do not look like http(s)/www URLs stay as plain text tokens
if (!urlTag.test(urlText)) {
pos = links[ln].index;
if (pos > lastPos) {
token = state.push('textUrl', '', 0);
token.content = text.slice(lastPos, pos);
token.level = level;
}
token = state.push('textUrl', '', 0);
lastPos = links[ln].lastIndex;
token.content = text.slice(pos, lastPos);
token.level = level;
continue;
}
// normalize display text: add an implicit scheme for schema-less matches,
// then strip it again so only the bare host/path is shown
if (!links[ln].schema) {
urlText = state.md.normalizeLinkText('http://' + urlText).replace(/^http:\/\//, '');
} else if (links[ln].schema === 'mailto:' && !/^mailto:/i.test(urlText)) {
urlText = state.md.normalizeLinkText('mailto:' + urlText).replace(/^mailto:/, '');
} else {
urlText = state.md.normalizeLinkText(urlText);
}
// plain text between the previous match and this one
pos = links[ln].index;
if (pos > lastPos) {
token = state.push('textUrl', '', 0);
token.content = text.slice(lastPos, pos);
token.level = level;
}
token = state.push('link_open', 'a', 1);
token.attrs = [ [ 'href', fullUrl ] ];
token.level = level++;
token.markup = 'linkify';
token.info = 'auto';
token = state.push('text', '', 0);
token.content = urlText;
token.level = level;
token = state.push('link_close', 'a', -1);
token.level = --level;
token.markup = 'linkify';
token.info = 'auto';
lastPos = links[ln].lastIndex;
}
// trailing text after the last match
if (lastPos < text.length) {
token = state.push('textUrl', '', 0);
token.content = text.slice(lastPos);
token.level = level;
}
state.pos = nextPos;
return true;
};
// `\title{...}` renderer: the <h1> wrapper comes from heading_open/close, so
// the title token itself renders as its inline content only.
const renderDocTitle: Renderer = (tokens, index, options, env, slf) =>
  renderInlineContent(tokens[index], options, env, slf);
/**
 * Render a token's children to an HTML string. Children with nested children
 * of their own go through `slf.renderInline` (or the tabular renderer for
 * `tabular_inline` tokens); childless tokens fall back to raw `content`.
 */
const renderInlineContent = (token, options, env, slf) => {
  if (!token.children || !token.children.length) {
    return token.content;
  }
  let html = '';
  for (const child of token.children) {
    if (child.children && child.children.length > 1) {
      html += child.type === "tabular_inline"
        ? renderTabularInline(token.children, child, options, env, slf)
        : slf.renderInline(child.children, options, env);
    } else {
      html += slf.renderInline([child], options, env);
    }
  }
  return html;
};
// `\section{...}` renderer: numerable sections get an "N. " prefix drawn from
// the module-level sectionCount, incremented once per rendered section.
const renderSectionTitle: Renderer = (tokens, index, options, env, slf) => {
  const token = tokens[index];
  const prefix = token.is_numerable
    ? `<span class="section-number">${++sectionCount}. </span>`
    : '';
  return prefix + renderInlineContent(token, options, env, slf);
};
// `\subsection{...}` renderer: emits "<sec>.<sub>." numbering. A new parent
// section (token.isNewSect) restarts the subsection counter.
const renderSubsectionTitle: Renderer = (tokens, index, options, env, slf) => {
  const token = tokens[index];
  if (token.isNewSect) {
    subCount = 0;
  }
  const body = renderInlineContent(token, options, env, slf);
  const secNo = `<span class="section-number">${token.secNumber}.</span>`;
  const subNo = `<span class="sub_section-number">${++subCount}.</span>`;
  return `${secNo}${subNo} ${body}`;
};
// `\subsubsection{...}` renderer: emits "<sec>.<sub>.<subsub>." numbering.
// A new parent subsection (token.isNewSubSection) restarts the inner counter.
const renderSubSubsectionTitle: Renderer = (tokens, index, options, env, slf) => {
  const token = tokens[index];
  if (token.isNewSubSection) {
    subSubCount = 0;
  }
  const body = renderInlineContent(token, options, env, slf);
  const secNo = `<span class="section-number">${token.secNumber}.</span>`;
  const subNo = `<span class="sub_section-number">${subCount}.${++subSubCount}.</span>`;
  return `${secNo}${subNo} ${body}`;
};
// Render a single author line; docx output turns each line into its own
// centered block-level span.
const getAuthorItemToken = (tokens, index, options, env, slf) => {
  const body = renderInlineContent(tokens[index], options, env, slf);
  if (options.forDocx) {
    return `<span style=" display: block; text-align: center;">${body}</span>`;
  }
  return `<span>${body}</span>`;
};
/**
 * Render one `author_column` token as a `<p>` cell; docx output constrains
 * the column width so multiple author columns flow side by side.
 *
 * Fix: `slf.renderInline` was called without its `env` argument, so renderer
 * rules that read the environment saw `undefined` while author columns were
 * rendered. `env` is now threaded through, matching renderInlineContent and
 * markdown-it's `renderInline(tokens, options, env)` signature.
 */
const getAuthorColumnToken = (tokens, index, options, env, slf) => {
  const token = tokens[index];
  const attrStyle = options.forDocx
    ? 'min-width: 30%; max-width: 50%; padding: 0 7px;'
    : '';
  const content: string = token.children && token.children.length
    ? slf.renderInline(token.children, options, env)
    : renderInlineContent(token, options, env, slf);
  return attrStyle
    ? `<p style="${attrStyle}">${content}</p>`
    : `<p>${content}</p>`;
};
/**
 * Render the top-level `author` token as a `<div class="author">` wrapping
 * the author columns; docx output also carries the flex style that was set on
 * the token at parse time.
 *
 * Fix: `slf.renderInline` was called without its `env` argument, so renderer
 * rules that read the environment saw `undefined` while authors were
 * rendered. `env` is now threaded through, matching renderInlineContent and
 * markdown-it's `renderInline(tokens, options, env)` signature.
 */
const renderAuthorToken: Renderer = (tokens, index, options, env, slf) => {
  const token = tokens[index];
  const divStyle: string = options.forDocx
    ? token.attrGet('style')
    : '';
  const res: string = token.children && token.children.length
    ? slf.renderInline(token.children, options, env)
    : renderInlineContent(token, options, env, slf);
  if (divStyle) {
    return `<div class="author" style="${divStyle}">
${res}
</div>`;
  }
  return `<div class="author">
${res}
</div>`;
};
// `textbf` payload renders as its inline content; the <strong> wrapping is
// produced by the textbf_open/textbf_close tokens.
const renderBoldText = (tokens, idx, options, env, slf) =>
  renderInlineContent(tokens[idx], options, env, slf);
// `textit` payload renders as its inline content; the <em> wrapping is
// produced by the textit_open/textit_close tokens.
const renderItalicText = (tokens, idx, options, env, slf) =>
  renderInlineContent(tokens[idx], options, env, slf);
// Opening tag for `texttt`, preserving any attributes set on the token.
const renderCodeInlineOpen = (tokens, idx, options, env, slf) => {
  const attrs = slf.renderAttrs(tokens[idx]);
  return `<code${attrs}>`;
};
// Closing tag for `texttt`.
const renderCodeInlineClose = () => '</code>';
// `texttt` payload; the children were inline-parsed at tokenize time, so
// escaping happens in the standard text renderer rules.
// NOTE(review): the raw fallback (childless token.content) is not
// HTML-escaped here — confirm payloads can never reach this renderer unparsed.
const renderCodeInline = (tokens, idx, options, env, slf) =>
  renderInlineContent(tokens[idx], options, env, slf);
const renderUrl = token => `<a href="${token.content}">${token.content}</a>`;
/**
 * Render a `textUrl` token (non-link text gathered around \url payloads) as a
 * placeholder anchor.
 *
 * Fix: the token content was interpolated into the anchor body without
 * escaping, so content containing `<` could inject markup. The content is now
 * HTML-entity-escaped.
 */
const renderTextUrl = token => {
  const escaped = String(token.content)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;');
  return `<a href="#" class="text-url">${escaped}</a>`;
};
// Token types produced by textTypes (plus their open/close pairs). Only the
// KEYS are consumed in this module (Object.keys in the plugin below registers
// one renderer rule per key); the values look like external converter rule
// names — NOTE(review): confirm where the values are consumed, and whether the
// PascalCase (textbf/textit) vs raw-name (texttt) asymmetry is intentional.
const mappingTextStyles = {
textbf: "TextBold",
textbf_open: "TextBoldOpen",
textbf_close: "TextBoldClose",
textit: "TextIt",
textit_open: "TextItOpen",
textit_close: "TextItClose",
texttt: "texttt",
texttt_open: "texttt_open",
texttt_close: "texttt_close",
};
// Structural token types (headings, author blocks, urls) covered by the
// second renderer dispatch in the plugin below; again only the keys are used
// here.
const mapping = {
section: "Section",
title: "Title",
author: "Author",
author_column: "authorColumn",
author_item: "authorItem",
subsection: "Subsection",
subsubsection: "Subsubsection",
url: "Url",
textUrl: "textUrl"
};
// Plugin factory: returns a markdown-it plugin that registers the LaTeX-ish
// block/inline rules above and one renderer rule per token type this module
// emits, plus custom s_open (strike-through) and link_open renderers.
export default () => {
return (md: MarkdownIt) => {
// start each plugin installation with a clean parse-time section counter
resetCounter();
md.block.ruler.before("heading", "headingSection", headingSection);
md.block.ruler.before("paragraphDiv", "abstractBlock", abstractBlock);
md.inline.ruler.before("multiMath", "textTypes", textTypes);
md.inline.ruler.before("textTypes", "textAuthor", textAuthor);
md.inline.ruler.before('textTypes', 'linkifyURL', linkifyURL);
// one shared renderer dispatches all text-style tokens by their type
Object.keys(mappingTextStyles).forEach(key => {
md.renderer.rules[key] = (tokens, idx, options, env, slf) => {
switch (tokens[idx].type) {
case "textbf":
return renderBoldText(tokens, idx, options, env, slf);
case "textbf_open":
return '<strong>';
case "textbf_close":
return '</strong>';
case "textit":
return renderItalicText(tokens, idx, options, env, slf);
case "textit_open":
return '<em>';
case "textit_close":
return '</em>';
case "texttt":
return renderCodeInline(tokens, idx, options, env, slf);
case "texttt_open":
return renderCodeInlineOpen(tokens, idx, options, env, slf);
case "texttt_close":
return renderCodeInlineClose();
default:
return '';
}
}
});
// structural tokens (headings, authors, urls) share a second dispatcher
Object.keys(mapping).forEach(key => {
md.renderer.rules[key] = (tokens, idx, options, env = {}, slf) => {
switch (tokens[idx].type) {
case "section":
return renderSectionTitle(tokens, idx, options, env, slf);
case "subsection":
return renderSubsectionTitle(tokens, idx, options, env, slf);
case "subsubsection":
return renderSubSubsectionTitle(tokens, idx, options, env, slf);
case "title":
return renderDocTitle(tokens, idx, options, env, slf);
case "author":
return renderAuthorToken(tokens, idx, options, env, slf);
case "author_column":
return getAuthorColumnToken(tokens, idx, options, env, slf);
case "author_item":
return getAuthorItemToken(tokens, idx, options, env, slf);
case "url":
return renderUrl(tokens[idx]);
case "textUrl":
return renderTextUrl(tokens[idx]);
default:
return '';
}
}
});
// strike-through: push the line-through style onto every token up to the
// matching s_close so nested tokens inherit it
md.renderer.rules.s_open = function (tokens, idx, options, env, self) {
let i = 0;
while ((idx + i) < tokens.length && tokens[idx + i].type !== 's_close') {
const token = tokens[idx+i];
token.attrSet('style', 'text-decoration: line-through; color: inherit;');
i++;
}
return self.renderToken(tokens, idx, options)
};
// links: choose target per options, then pick a word-break style from the
// FOLLOWING token's text (long unbroken link text gets break-all)
md.renderer.rules.link_open = function (tokens, idx, options, env, self) {
if (options.openLinkInNewWindow) {
tokens[idx].attrPush(['target', '_blank']);
tokens[idx].attrPush(['rel', 'noopener']);
} else {
tokens[idx].attrPush(['target', '_self']);
}
// no following text token: default wrapping behaviour
if (!tokens[idx + 1] || !tokens[idx + 1].content) {
tokens[idx].attrPush([
'style', 'word-break: break-word'
]);
return self.renderToken(tokens, idx, options)
}
if (tokens[idx + 1].content.length > 40 && !tokens[idx + 1].content.includes(' ')) {
tokens[idx].attrPush([
'style', 'word-break: break-all'
]);
} else if (!tokens[idx + 1].content.includes(' ')) {
tokens[idx].attrPush([
'style', 'display: inline-block'
]);
} else {
tokens[idx].attrPush([
'style', 'word-break: break-word'
]);
}
return self.renderToken(tokens, idx, options)
}
};
};
// Moving-average type selector accepted by indicators with an optMA_Type
// parameter (e.g. APO below). NOTE(review): presumably mirrors TA-Lib's
// TA_MAType ordering — confirm against the backing implementation.
export declare enum MATypes {
SMA = 0,
EMA = 1,
WMA = 2,
DEMA = 3,
TEMA = 4,
TRIMA = 5,
KAMA = 6,
MAMA = 7,
T3 = 8
}
// A single OHLCV bar consumed by the record-based indicator overloads below.
// NOTE(review): this interface shadows TypeScript's built-in Record<K, V>
// utility type inside this module — consumers of these typings should be
// aware of the name collision.
export declare interface Record {
Time: number; // NOTE(review): epoch unit (seconds vs milliseconds) not specified here — confirm
Open: number;
High: number;
Low: number;
Close: number;
Volume: number;
}
/**
* ACCBANDS - Acceleration Bands
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number[]} inClose - Close
* @param {number} [optTime_Period=20] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {[number[], number[], number[]]} - [outRealUpperBand, outRealMiddleBand, outRealLowerBand]
*/
export declare function ACCBANDS(
inHigh: number[],
inLow: number[],
inClose: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): [number[], number[], number[]];
/**
* ACCBANDS - Acceleration Bands
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [optTime_Period=20] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {[number[], number[], number[]]} - [outRealUpperBand, outRealMiddleBand, outRealLowerBand]
*/
export declare function ACCBANDS(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): [number[], number[], number[]];
/**
* ACOS - Vector Trigonometric ACos
*
* @param {number[]} inReal - Double Array
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ACOS(
inReal: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
* ACOS - Vector Trigonometric ACos
*
* @param {Record[]} inRecords - The records to extract data
* @param {string} inRealName - The field name to extract from `inRecords`
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ACOS(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
* AD - Chaikin A/D Line
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number[]} inClose - Close
* @param {number[]} inVolume - Volume
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function AD(
inHigh: number[],
inLow: number[],
inClose: number[],
inVolume: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
* AD - Chaikin A/D Line
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function AD(
inRecords: Record[],
startIdx?: number,
endIdx?: number
): number[];
/**
* ADD - Vector Arithmetic Add
*
* @param {number[]} inReal0 - Double Array
* @param {number[]} inReal1 - Double Array
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADD(
inReal0: number[],
inReal1: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
* ADD - Vector Arithmetic Add
*
* @param {Record[]} inRecords - The records to extract data
* @param {string} inReal0Name - The field name to extract from `inRecords`
* @param {string} inReal1Name - The field name to extract from `inRecords`
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADD(
inRecords: Record[],
inReal0Name: string,
inReal1Name: string,
startIdx?: number,
endIdx?: number
): number[];
/**
* ADOSC - Chaikin A/D Oscillator
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number[]} inClose - Close
* @param {number[]} inVolume - Volume
* @param {number} [optFast_Period=3] - Number of period for the fast MA
* @param {number} [optSlow_Period=10] - Number of period for the slow MA
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADOSC(
inHigh: number[],
inLow: number[],
inClose: number[],
inVolume: number[],
optFast_Period?: number,
optSlow_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* ADOSC - Chaikin A/D Oscillator
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [optFast_Period=3] - Number of period for the fast MA
* @param {number} [optSlow_Period=10] - Number of period for the slow MA
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADOSC(
inRecords: Record[],
optFast_Period?: number,
optSlow_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* ADX - Average Directional Movement Index
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number[]} inClose - Close
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADX(
inHigh: number[],
inLow: number[],
inClose: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* ADX - Average Directional Movement Index
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADX(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* ADXR - Average Directional Movement Index Rating
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number[]} inClose - Close
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADXR(
inHigh: number[],
inLow: number[],
inClose: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* ADXR - Average Directional Movement Index Rating
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ADXR(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* APO - Absolute Price Oscillator
*
* @param {number[]} inReal - Double Array
* @param {number} [optFast_Period=12] - Number of period for the fast MA
* @param {number} [optSlow_Period=26] - Number of period for the slow MA
* @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function APO(
inReal: number[],
optFast_Period?: number,
optSlow_Period?: number,
optMA_Type?: MATypes,
startIdx?: number,
endIdx?: number
): number[];
/**
* APO - Absolute Price Oscillator
*
* @param {Record[]} inRecords - The records to extract data
* @param {string} inRealName - The field name to extract from `inRecords`
* @param {number} [optFast_Period=12] - Number of period for the fast MA
* @param {number} [optSlow_Period=26] - Number of period for the slow MA
* @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function APO(
inRecords: Record[],
inRealName: string,
optFast_Period?: number,
optSlow_Period?: number,
optMA_Type?: MATypes,
startIdx?: number,
endIdx?: number
): number[];
/**
* AROON - Aroon
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {[number[], number[]]} - [outAroonDown, outAroonUp]
*/
export declare function AROON(
inHigh: number[],
inLow: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
* AROON - Aroon
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {[number[], number[]]} - [outAroonDown, outAroonUp]
*/
export declare function AROON(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
* AROONOSC - Aroon Oscillator
*
* @param {number[]} inHigh - High
* @param {number[]} inLow - Low
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function AROONOSC(
inHigh: number[],
inLow: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* AROONOSC - Aroon Oscillator
*
* @param {Record[]} inRecords - The records to extract data
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function AROONOSC(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* ASIN - Vector Trigonometric ASin
*
* @param {number[]} inReal - Double Array
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ASIN(
inReal: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
* ASIN - Vector Trigonometric ASin
*
* @param {Record[]} inRecords - The records to extract data
* @param {string} inRealName - The field name to extract from `inRecords`
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ASIN(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
* ATAN - Vector Trigonometric ATan
*
* @param {number[]} inReal - Double Array
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ATAN(
inReal: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
* ATAN - Vector Trigonometric ATan
*
* @param {Record[]} inRecords - The records to extract data
* @param {string} inRealName - The field name to extract from `inRecords`
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please not that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function ATAN(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ATR - Average True Range
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ATR(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * ATR - Average True Range
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ATR(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * AVGPRICE - Average Price
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function AVGPRICE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * AVGPRICE - Average Price
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function AVGPRICE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * AVGDEV - Average Deviation
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function AVGDEV(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * AVGDEV - Average Deviation
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function AVGDEV(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * BBANDS - Bollinger Bands
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optDeviations_up=2.000000e+0] - Deviation multiplier for upper band
 * @param {number} [optDeviations_down=2.000000e+0] - Deviation multiplier for lower band
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {[number[], number[], number[]]} - [outRealUpperBand, outRealMiddleBand, outRealLowerBand]
 */
export declare function BBANDS(
  inReal: number[],
  optTime_Period?: number,
  optDeviations_up?: number,
  optDeviations_down?: number,
  optMA_Type?: MATypes,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * BBANDS - Bollinger Bands
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optDeviations_up=2.000000e+0] - Deviation multiplier for upper band
 * @param {number} [optDeviations_down=2.000000e+0] - Deviation multiplier for lower band
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outRealUpperBand, outRealMiddleBand, outRealLowerBand]
 */
export declare function BBANDS(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  optDeviations_up?: number,
  optDeviations_down?: number,
  optMA_Type?: MATypes,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * BETA - Beta
 *
 * @param {number[]} inReal0 - Double Array
 * @param {number[]} inReal1 - Double Array
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function BETA(
  inReal0: number[],
  inReal1: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * BETA - Beta
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inReal0Name - The field name to extract from `inRecords`
 * @param {string} inReal1Name - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function BETA(
  inRecords: Record[],
  inReal0Name: string,
  inReal1Name: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * BOP - Balance Of Power
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function BOP(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * BOP - Balance Of Power
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function BOP(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CCI - Commodity Channel Index
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CCI(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CCI - Commodity Channel Index
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CCI(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL2CROWS - Two Crows
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL2CROWS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL2CROWS - Two Crows
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL2CROWS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3BLACKCROWS - Three Black Crows
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3BLACKCROWS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3BLACKCROWS - Three Black Crows
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3BLACKCROWS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3INSIDE - Three Inside Up/Down
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3INSIDE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3INSIDE - Three Inside Up/Down
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3INSIDE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3LINESTRIKE - Three-Line Strike
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3LINESTRIKE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3LINESTRIKE - Three-Line Strike
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3LINESTRIKE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3OUTSIDE - Three Outside Up/Down
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3OUTSIDE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3OUTSIDE - Three Outside Up/Down
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3OUTSIDE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3STARSINSOUTH - Three Stars In The South
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3STARSINSOUTH(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3STARSINSOUTH - Three Stars In The South
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3STARSINSOUTH(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3WHITESOLDIERS - Three Advancing White Soldiers
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3WHITESOLDIERS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDL3WHITESOLDIERS - Three Advancing White Soldiers
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDL3WHITESOLDIERS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLABANDONEDBABY - Abandoned Baby
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLABANDONEDBABY(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLABANDONEDBABY - Abandoned Baby
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLABANDONEDBABY(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLADVANCEBLOCK - Advance Block
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLADVANCEBLOCK(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLADVANCEBLOCK - Advance Block
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLADVANCEBLOCK(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLBELTHOLD - Belt-hold
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLBELTHOLD(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLBELTHOLD - Belt-hold
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLBELTHOLD(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLBREAKAWAY - Breakaway
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLBREAKAWAY(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLBREAKAWAY - Breakaway
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLBREAKAWAY(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLCLOSINGMARUBOZU - Closing Marubozu
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLCLOSINGMARUBOZU(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLCLOSINGMARUBOZU - Closing Marubozu
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLCLOSINGMARUBOZU(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLCONCEALBABYSWALL - Concealing Baby Swallow
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLCONCEALBABYSWALL(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLCONCEALBABYSWALL - Concealing Baby Swallow
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLCONCEALBABYSWALL(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLCOUNTERATTACK - Counterattack
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLCOUNTERATTACK(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLCOUNTERATTACK - Counterattack
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLCOUNTERATTACK(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDARKCLOUDCOVER - Dark Cloud Cover
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=5.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDARKCLOUDCOVER(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDARKCLOUDCOVER - Dark Cloud Cover
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=5.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDARKCLOUDCOVER(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDOJI - Doji
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDOJI(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDOJI - Doji
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDOJI(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDOJISTAR - Doji Star
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDOJISTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDOJISTAR - Doji Star
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDOJISTAR(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDRAGONFLYDOJI - Dragonfly Doji
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDRAGONFLYDOJI(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLDRAGONFLYDOJI - Dragonfly Doji
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLDRAGONFLYDOJI(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLENGULFING - Engulfing Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLENGULFING(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLENGULFING - Engulfing Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLENGULFING(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLEVENINGDOJISTAR - Evening Doji Star
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLEVENINGDOJISTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLEVENINGDOJISTAR - Evening Doji Star
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLEVENINGDOJISTAR(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLEVENINGSTAR - Evening Star
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLEVENINGSTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLEVENINGSTAR - Evening Star
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLEVENINGSTAR(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLGAPSIDESIDEWHITE - Up/Down-gap side-by-side white lines
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLGAPSIDESIDEWHITE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLGAPSIDESIDEWHITE - Up/Down-gap side-by-side white lines
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLGAPSIDESIDEWHITE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLGRAVESTONEDOJI - Gravestone Doji
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLGRAVESTONEDOJI(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLGRAVESTONEDOJI - Gravestone Doji
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLGRAVESTONEDOJI(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHAMMER - Hammer
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHAMMER(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHAMMER - Hammer
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHAMMER(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHANGINGMAN - Hanging Man
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHANGINGMAN(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHANGINGMAN - Hanging Man
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHANGINGMAN(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHARAMI - Harami Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHARAMI(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHARAMI - Harami Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHARAMI(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHARAMICROSS - Harami Cross Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHARAMICROSS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHARAMICROSS - Harami Cross Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHARAMICROSS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHIGHWAVE - High-Wave Candle
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHIGHWAVE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHIGHWAVE - High-Wave Candle
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHIGHWAVE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHIKKAKE - Hikkake Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHIKKAKE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHIKKAKE - Hikkake Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHIKKAKE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHIKKAKEMOD - Modified Hikkake Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHIKKAKEMOD(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHIKKAKEMOD - Modified Hikkake Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHIKKAKEMOD(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHOMINGPIGEON - Homing Pigeon
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHOMINGPIGEON(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLHOMINGPIGEON - Homing Pigeon
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLHOMINGPIGEON(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLIDENTICAL3CROWS - Identical Three Crows
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLIDENTICAL3CROWS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLIDENTICAL3CROWS - Identical Three Crows
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLIDENTICAL3CROWS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLINNECK - In-Neck Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLINNECK(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLINNECK - In-Neck Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLINNECK(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLINVERTEDHAMMER - Inverted Hammer
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLINVERTEDHAMMER(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLINVERTEDHAMMER - Inverted Hammer
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLINVERTEDHAMMER(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLKICKING - Kicking
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLKICKING(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLKICKING - Kicking
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLKICKING(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLKICKINGBYLENGTH - Kicking - bull/bear determined by the longer marubozu
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLKICKINGBYLENGTH(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLKICKINGBYLENGTH - Kicking - bull/bear determined by the longer marubozu
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLKICKINGBYLENGTH(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLLADDERBOTTOM - Ladder Bottom
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLLADDERBOTTOM(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLLADDERBOTTOM - Ladder Bottom
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLLADDERBOTTOM(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLLONGLEGGEDDOJI - Long Legged Doji
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLLONGLEGGEDDOJI(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLLONGLEGGEDDOJI - Long Legged Doji
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLLONGLEGGEDDOJI(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLLONGLINE - Long Line Candle
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLLONGLINE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLLONGLINE - Long Line Candle
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLLONGLINE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMARUBOZU - Marubozu
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMARUBOZU(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMARUBOZU - Marubozu
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMARUBOZU(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMATCHINGLOW - Matching Low
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMATCHINGLOW(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMATCHINGLOW - Matching Low
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMATCHINGLOW(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMATHOLD - Mat Hold
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=5.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMATHOLD(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMATHOLD - Mat Hold
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=5.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMATHOLD(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMORNINGDOJISTAR - Morning Doji Star
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMORNINGDOJISTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMORNINGDOJISTAR - Morning Doji Star
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMORNINGDOJISTAR(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMORNINGSTAR - Morning Star
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMORNINGSTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLMORNINGSTAR - Morning Star
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optPenetration=3.000000e-1] - Percentage of penetration of a candle within another candle
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLMORNINGSTAR(
  inRecords: Record[],
  optPenetration?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLONNECK - On-Neck Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLONNECK(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLONNECK - On-Neck Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLONNECK(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLPIERCING - Piercing Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLPIERCING(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLPIERCING - Piercing Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLPIERCING(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLRICKSHAWMAN - Rickshaw Man
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLRICKSHAWMAN(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLRICKSHAWMAN - Rickshaw Man
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLRICKSHAWMAN(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLRISEFALL3METHODS - Rising/Falling Three Methods
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLRISEFALL3METHODS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLRISEFALL3METHODS - Rising/Falling Three Methods
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLRISEFALL3METHODS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSEPARATINGLINES - Separating Lines
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSEPARATINGLINES(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSEPARATINGLINES - Separating Lines
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSEPARATINGLINES(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSHOOTINGSTAR - Shooting Star
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSHOOTINGSTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSHOOTINGSTAR - Shooting Star
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSHOOTINGSTAR(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSHORTLINE - Short Line Candle
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSHORTLINE(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSHORTLINE - Short Line Candle
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSHORTLINE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSPINNINGTOP - Spinning Top
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSPINNINGTOP(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSPINNINGTOP - Spinning Top
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSPINNINGTOP(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSTALLEDPATTERN - Stalled Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSTALLEDPATTERN(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSTALLEDPATTERN - Stalled Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSTALLEDPATTERN(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSTICKSANDWICH - Stick Sandwich
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSTICKSANDWICH(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLSTICKSANDWICH - Stick Sandwich
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLSTICKSANDWICH(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTAKURI - Takuri (Dragonfly Doji with very long lower shadow)
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTAKURI(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTAKURI - Takuri (Dragonfly Doji with very long lower shadow)
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTAKURI(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTASUKIGAP - Tasuki Gap
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTASUKIGAP(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTASUKIGAP - Tasuki Gap
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTASUKIGAP(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTHRUSTING - Thrusting Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTHRUSTING(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTHRUSTING - Thrusting Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTHRUSTING(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTRISTAR - Tristar Pattern
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTRISTAR(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLTRISTAR - Tristar Pattern
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLTRISTAR(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLUNIQUE3RIVER - Unique 3 River
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLUNIQUE3RIVER(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLUNIQUE3RIVER - Unique 3 River
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLUNIQUE3RIVER(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLUPSIDEGAP2CROWS - Upside Gap Two Crows
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLUPSIDEGAP2CROWS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLUPSIDEGAP2CROWS - Upside Gap Two Crows
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLUPSIDEGAP2CROWS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLXSIDEGAP3METHODS - Upside/Downside Gap Three Methods
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLXSIDEGAP3METHODS(
  inOpen: number[],
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CDLXSIDEGAP3METHODS - Upside/Downside Gap Three Methods
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function CDLXSIDEGAP3METHODS(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CEIL - Vector Ceil
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CEIL(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CEIL - Vector Ceil
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CEIL(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CMO - Chande Momentum Oscillator
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CMO(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CMO - Chande Momentum Oscillator
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CMO(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CORREL - Pearson's Correlation Coefficient (r)
 *
 * @param {number[]} inReal0 - Double Array
 * @param {number[]} inReal1 - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CORREL(
  inReal0: number[],
  inReal1: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * CORREL - Pearson's Correlation Coefficient (r)
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inReal0Name - The field name to extract from `inRecords`
 * @param {string} inReal1Name - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function CORREL(
  inRecords: Record[],
  inReal0Name: string,
  inReal1Name: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * COS - Vector Trigonometric Cos
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function COS(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * COS - Vector Trigonometric Cos
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function COS(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * COSH - Vector Trigonometric Cosh
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function COSH(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * COSH - Vector Trigonometric Cosh
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function COSH(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * DEMA - Double Exponential Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function DEMA(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * DEMA - Double Exponential Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function DEMA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * DIV - Vector Arithmetic Div
 *
 * @param {number[]} inReal0 - Double Array
 * @param {number[]} inReal1 - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function DIV(
  inReal0: number[],
  inReal1: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * DIV - Vector Arithmetic Div
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inReal0Name - The field name to extract from `inRecords`
 * @param {string} inReal1Name - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function DIV(
  inRecords: Record[],
  inReal0Name: string,
  inReal1Name: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * DX - Directional Movement Index
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function DX(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * DX - Directional Movement Index
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function DX(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * EMA - Exponential Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function EMA(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * EMA - Exponential Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function EMA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * EXP - Vector Arithmetic Exp
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function EXP(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * EXP - Vector Arithmetic Exp
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function EXP(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * FLOOR - Vector Floor
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function FLOOR(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * FLOOR - Vector Floor
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function FLOOR(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_DCPERIOD - Hilbert Transform - Dominant Cycle Period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function HT_DCPERIOD(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_DCPERIOD - Hilbert Transform - Dominant Cycle Period
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function HT_DCPERIOD(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_DCPHASE - Hilbert Transform - Dominant Cycle Phase
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function HT_DCPHASE(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_DCPHASE - Hilbert Transform - Dominant Cycle Phase
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function HT_DCPHASE(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_PHASOR - Hilbert Transform - Phasor Components
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outInPhase, outQuadrature]
 */
export declare function HT_PHASOR(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * HT_PHASOR - Hilbert Transform - Phasor Components
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outInPhase, outQuadrature]
 */
export declare function HT_PHASOR(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * HT_SINE - Hilbert Transform - SineWave
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outSine, outLeadSine]
 */
export declare function HT_SINE(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * HT_SINE - Hilbert Transform - SineWave
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outSine, outLeadSine]
 */
export declare function HT_SINE(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * HT_TRENDLINE - Hilbert Transform - Instantaneous Trendline
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function HT_TRENDLINE(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_TRENDLINE - Hilbert Transform - Instantaneous Trendline
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function HT_TRENDLINE(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_TRENDMODE - Hilbert Transform - Trend vs Cycle Mode
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function HT_TRENDMODE(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * HT_TRENDMODE - Hilbert Transform - Trend vs Cycle Mode
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function HT_TRENDMODE(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * IMI - Intraday Momentum Index
 *
 * @param {number[]} inOpen - Open
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function IMI(
  inOpen: number[],
  inClose: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * IMI - Intraday Momentum Index
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function IMI(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * KAMA - Kaufman Adaptive Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function KAMA(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * KAMA - Kaufman Adaptive Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function KAMA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
* LINEARREG - Linear Regression
*
* @param {number[]} inReal - Double Array
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength-1] - The end index to process, please not that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function LINEARREG(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
* LINEARREG - Linear Regression
*
* @param {Record[]} inRecords - The records to extract data
* @param {string} inRealName - The field name to extract from `inRecords`
* @param {number} [optTime_Period=14] - Number of period
* @param {number} [startIdx=0] - The start index to process
* @param {number} [endIdx=inLength - 1] - The end index to process, please not that the value is included, default is the input records length - 1
* @returns {number[]} - outReal (Double Array)
*/
export declare function LINEARREG(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * LINEARREG_ANGLE - Linear Regression Angle
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LINEARREG_ANGLE(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LINEARREG_ANGLE - Linear Regression Angle
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LINEARREG_ANGLE(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LINEARREG_INTERCEPT - Linear Regression Intercept
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LINEARREG_INTERCEPT(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LINEARREG_INTERCEPT - Linear Regression Intercept
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LINEARREG_INTERCEPT(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LINEARREG_SLOPE - Linear Regression Slope
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LINEARREG_SLOPE(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LINEARREG_SLOPE - Linear Regression Slope
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LINEARREG_SLOPE(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LN - Vector Log Natural
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LN(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LN - Vector Log Natural
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LN(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LOG10 - Vector Log10
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LOG10(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * LOG10 - Vector Log10
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function LOG10(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MA - Moving average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MA(
  inReal: number[],
  optTime_Period?: number,
  optMA_Type?: MATypes,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MA - Moving average
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  optMA_Type?: MATypes,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MACD - Moving Average Convergence/Divergence
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optFast_Period=12] - Number of period for the fast MA
 * @param {number} [optSlow_Period=26] - Number of period for the slow MA
 * @param {number} [optSignal_Period=9] - Smoothing for the signal line (nb of period)
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outMACD, outMACDSignal, outMACDHist]
 */
export declare function MACD(
  inReal: number[],
  optFast_Period?: number,
  optSlow_Period?: number,
  optSignal_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * MACD - Moving Average Convergence/Divergence
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optFast_Period=12] - Number of period for the fast MA
 * @param {number} [optSlow_Period=26] - Number of period for the slow MA
 * @param {number} [optSignal_Period=9] - Smoothing for the signal line (nb of period)
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outMACD, outMACDSignal, outMACDHist]
 */
export declare function MACD(
  inRecords: Record[],
  inRealName: string,
  optFast_Period?: number,
  optSlow_Period?: number,
  optSignal_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * MACDEXT - MACD with controllable MA type
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optFast_Period=12] - Number of period for the fast MA
 * @param {MATypes} [optFast_MA=SMA] - Type of Moving Average for fast MA
 * @param {number} [optSlow_Period=26] - Number of period for the slow MA
 * @param {MATypes} [optSlow_MA=SMA] - Type of Moving Average for slow MA
 * @param {number} [optSignal_Period=9] - Smoothing for the signal line (nb of period)
 * @param {MATypes} [optSignal_MA=SMA] - Type of Moving Average for signal line
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outMACD, outMACDSignal, outMACDHist]
 */
export declare function MACDEXT(
  inReal: number[],
  optFast_Period?: number,
  optFast_MA?: MATypes,
  optSlow_Period?: number,
  optSlow_MA?: MATypes,
  optSignal_Period?: number,
  optSignal_MA?: MATypes,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * MACDEXT - MACD with controllable MA type
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optFast_Period=12] - Number of period for the fast MA
 * @param {MATypes} [optFast_MA=SMA] - Type of Moving Average for fast MA
 * @param {number} [optSlow_Period=26] - Number of period for the slow MA
 * @param {MATypes} [optSlow_MA=SMA] - Type of Moving Average for slow MA
 * @param {number} [optSignal_Period=9] - Smoothing for the signal line (nb of period)
 * @param {MATypes} [optSignal_MA=SMA] - Type of Moving Average for signal line
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outMACD, outMACDSignal, outMACDHist]
 */
export declare function MACDEXT(
  inRecords: Record[],
  inRealName: string,
  optFast_Period?: number,
  optFast_MA?: MATypes,
  optSlow_Period?: number,
  optSlow_MA?: MATypes,
  optSignal_Period?: number,
  optSignal_MA?: MATypes,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * MACDFIX - Moving Average Convergence/Divergence Fix 12/26
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optSignal_Period=9] - Smoothing for the signal line (nb of period)
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outMACD, outMACDSignal, outMACDHist]
 */
export declare function MACDFIX(
  inReal: number[],
  optSignal_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * MACDFIX - Moving Average Convergence/Divergence Fix 12/26
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optSignal_Period=9] - Smoothing for the signal line (nb of period)
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[], number[]]} - [outMACD, outMACDSignal, outMACDHist]
 */
export declare function MACDFIX(
  inRecords: Record[],
  inRealName: string,
  optSignal_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[], number[]];
/**
 * MAMA - MESA Adaptive Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optFast_Limit=5.000000e-1] - Upper limit used in the adaptive algorithm
 * @param {number} [optSlow_Limit=5.000000e-2] - Lower limit used in the adaptive algorithm
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outMAMA, outFAMA]
 */
export declare function MAMA(
  inReal: number[],
  optFast_Limit?: number,
  optSlow_Limit?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * MAMA - MESA Adaptive Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optFast_Limit=5.000000e-1] - Upper limit used in the adaptive algorithm
 * @param {number} [optSlow_Limit=5.000000e-2] - Lower limit used in the adaptive algorithm
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outMAMA, outFAMA]
 */
export declare function MAMA(
  inRecords: Record[],
  inRealName: string,
  optFast_Limit?: number,
  optSlow_Limit?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * MAVP - Moving average with variable period
 *
 * @param {number[]} inReal - Double Array
 * @param {number[]} inPeriods - Double Array
 * @param {number} [optMinimum_Period=2] - Value less than minimum will be changed to Minimum period
 * @param {number} [optMaximum_Period=30] - Value higher than maximum will be changed to Maximum period
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MAVP(
  inReal: number[],
  inPeriods: number[],
  optMinimum_Period?: number,
  optMaximum_Period?: number,
  optMA_Type?: MATypes,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MAVP - Moving average with variable period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {string} inPeriodsName - The field name to extract from `inRecords`
 * @param {number} [optMinimum_Period=2] - Value less than minimum will be changed to Minimum period
 * @param {number} [optMaximum_Period=30] - Value higher than maximum will be changed to Maximum period
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MAVP(
  inRecords: Record[],
  inRealName: string,
  inPeriodsName: string,
  optMinimum_Period?: number,
  optMaximum_Period?: number,
  optMA_Type?: MATypes,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MAX - Highest value over a specified period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MAX(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MAX - Highest value over a specified period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MAX(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MAXINDEX - Index of highest value over a specified period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function MAXINDEX(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MAXINDEX - Index of highest value over a specified period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function MAXINDEX(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MEDPRICE - Median Price
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MEDPRICE(
  inHigh: number[],
  inLow: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MEDPRICE - Median Price
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MEDPRICE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MFI - Money Flow Index
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number[]} inVolume - Volume
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MFI(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  inVolume: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MFI - Money Flow Index
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MFI(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MIDPOINT - MidPoint over period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MIDPOINT(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MIDPOINT - MidPoint over period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MIDPOINT(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MIDPRICE - Midpoint Price over period
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MIDPRICE(
  inHigh: number[],
  inLow: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MIDPRICE - Midpoint Price over period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MIDPRICE(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MIN - Lowest value over a specified period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MIN(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MIN - Lowest value over a specified period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MIN(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MININDEX - Index of lowest value over a specified period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function MININDEX(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MININDEX - Index of lowest value over a specified period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outInteger (Integer Array)
 */
export declare function MININDEX(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MINMAX - Lowest and highest values over a specified period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outMin, outMax]
 */
export declare function MINMAX(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * MINMAX - Lowest and highest values over a specified period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outMin, outMax]
 */
export declare function MINMAX(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * MINMAXINDEX - Indexes of lowest and highest values over a specified period
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outMinIdx, outMaxIdx]
 */
export declare function MINMAXINDEX(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * MINMAXINDEX - Indexes of lowest and highest values over a specified period
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outMinIdx, outMaxIdx]
 */
export declare function MINMAXINDEX(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): [number[], number[]];
/**
 * MINUS_DI - Minus Directional Indicator
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MINUS_DI(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MINUS_DI - Minus Directional Indicator
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MINUS_DI(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MINUS_DM - Minus Directional Movement
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MINUS_DM(
  inHigh: number[],
  inLow: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MINUS_DM - Minus Directional Movement
 *
 * @param {Record[]} inRecords - The records to extract data from
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MINUS_DM(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * MOM - Momentum
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MOM(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * MOM - Momentum
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MOM(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * MULT - Vector Arithmetic Mult
 *
 * @param {number[]} inReal0 - Double Array
 * @param {number[]} inReal1 - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MULT(
inReal0: number[],
inReal1: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
 * MULT - Vector Arithmetic Mult
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inReal0Name - The field name to extract from `inRecords`
 * @param {string} inReal1Name - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function MULT(
inRecords: Record[],
inReal0Name: string,
inReal1Name: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * NATR - Normalized Average True Range
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function NATR(
inHigh: number[],
inLow: number[],
inClose: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * NATR - Normalized Average True Range
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function NATR(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * OBV - On Balance Volume
 *
 * @param {number[]} inReal - Double Array
 * @param {number[]} inVolume - Volume
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function OBV(
inReal: number[],
inVolume: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
 * OBV - On Balance Volume
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function OBV(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * PLUS_DI - Plus Directional Indicator
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function PLUS_DI(
inHigh: number[],
inLow: number[],
inClose: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * PLUS_DI - Plus Directional Indicator
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function PLUS_DI(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * PLUS_DM - Plus Directional Movement
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function PLUS_DM(
inHigh: number[],
inLow: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * PLUS_DM - Plus Directional Movement
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function PLUS_DM(
inRecords: Record[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * PPO - Percentage Price Oscillator
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optFast_Period=12] - Number of period for the fast MA
 * @param {number} [optSlow_Period=26] - Number of period for the slow MA
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function PPO(
inReal: number[],
optFast_Period?: number,
optSlow_Period?: number,
optMA_Type?: MATypes,
startIdx?: number,
endIdx?: number
): number[];
/**
 * PPO - Percentage Price Oscillator
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optFast_Period=12] - Number of period for the fast MA
 * @param {number} [optSlow_Period=26] - Number of period for the slow MA
 * @param {MATypes} [optMA_Type=SMA] - Type of Moving Average
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function PPO(
inRecords: Record[],
inRealName: string,
optFast_Period?: number,
optSlow_Period?: number,
optMA_Type?: MATypes,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROC - Rate of change : ((price/prevPrice)-1)*100
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROC(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROC - Rate of change : ((price/prevPrice)-1)*100
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROC(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROCP - Rate of change Percentage: (price-prevPrice)/prevPrice
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROCP(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROCP - Rate of change Percentage: (price-prevPrice)/prevPrice
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROCP(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROCR - Rate of change ratio: (price/prevPrice)
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROCR(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROCR - Rate of change ratio: (price/prevPrice)
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROCR(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROCR100 - Rate of change ratio 100 scale: (price/prevPrice)*100
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROCR100(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * ROCR100 - Rate of change ratio 100 scale: (price/prevPrice)*100
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=10] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ROCR100(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * RSI - Relative Strength Index
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function RSI(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * RSI - Relative Strength Index
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function RSI(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SAR - Parabolic SAR
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number} [optAcceleration_Factor=2.000000e-2] - Acceleration Factor used up to the Maximum value
 * @param {number} [optAF_Maximum=2.000000e-1] - Acceleration Factor Maximum value
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SAR(
inHigh: number[],
inLow: number[],
optAcceleration_Factor?: number,
optAF_Maximum?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SAR - Parabolic SAR
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optAcceleration_Factor=2.000000e-2] - Acceleration Factor used up to the Maximum value
 * @param {number} [optAF_Maximum=2.000000e-1] - Acceleration Factor Maximum value
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SAR(
inRecords: Record[],
optAcceleration_Factor?: number,
optAF_Maximum?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SAREXT - Parabolic SAR - Extended
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number} [optStart_Value=0.000000e+0] - Start value and direction. 0 for Auto, >0 for Long, <0 for Short
 * @param {number} [optOffset_on_Reverse=0.000000e+0] - Percent offset added/removed to initial stop on short/long reversal
 * @param {number} [optAF_Init_Long=2.000000e-2] - Acceleration Factor initial value for the Long direction
 * @param {number} [optAF_Long=2.000000e-2] - Acceleration Factor for the Long direction
 * @param {number} [optAF_Max_Long=2.000000e-1] - Acceleration Factor maximum value for the Long direction
 * @param {number} [optAF_Init_Short=2.000000e-2] - Acceleration Factor initial value for the Short direction
 * @param {number} [optAF_Short=2.000000e-2] - Acceleration Factor for the Short direction
 * @param {number} [optAF_Max_Short=2.000000e-1] - Acceleration Factor maximum value for the Short direction
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SAREXT(
inHigh: number[],
inLow: number[],
optStart_Value?: number,
optOffset_on_Reverse?: number,
optAF_Init_Long?: number,
optAF_Long?: number,
optAF_Max_Long?: number,
optAF_Init_Short?: number,
optAF_Short?: number,
optAF_Max_Short?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SAREXT - Parabolic SAR - Extended
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optStart_Value=0.000000e+0] - Start value and direction. 0 for Auto, >0 for Long, <0 for Short
 * @param {number} [optOffset_on_Reverse=0.000000e+0] - Percent offset added/removed to initial stop on short/long reversal
 * @param {number} [optAF_Init_Long=2.000000e-2] - Acceleration Factor initial value for the Long direction
 * @param {number} [optAF_Long=2.000000e-2] - Acceleration Factor for the Long direction
 * @param {number} [optAF_Max_Long=2.000000e-1] - Acceleration Factor maximum value for the Long direction
 * @param {number} [optAF_Init_Short=2.000000e-2] - Acceleration Factor initial value for the Short direction
 * @param {number} [optAF_Short=2.000000e-2] - Acceleration Factor for the Short direction
 * @param {number} [optAF_Max_Short=2.000000e-1] - Acceleration Factor maximum value for the Short direction
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SAREXT(
inRecords: Record[],
optStart_Value?: number,
optOffset_on_Reverse?: number,
optAF_Init_Long?: number,
optAF_Long?: number,
optAF_Max_Long?: number,
optAF_Init_Short?: number,
optAF_Short?: number,
optAF_Max_Short?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SIN - Vector Trigonometric Sin
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SIN(
inReal: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
 * SIN - Vector Trigonometric Sin
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SIN(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SINH - Vector Trigonometric Sinh
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SINH(
inReal: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
 * SINH - Vector Trigonometric Sinh
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SINH(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SMA - Simple Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SMA(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SMA - Simple Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SMA(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SQRT - Vector Square Root
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SQRT(
inReal: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
 * SQRT - Vector Square Root
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SQRT(
inRecords: Record[],
inRealName: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * STDDEV - Standard Deviation
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optDeviations=1.000000e+0] - Nb of deviations
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function STDDEV(
inReal: number[],
optTime_Period?: number,
optDeviations?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * STDDEV - Standard Deviation
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optDeviations=1.000000e+0] - Nb of deviations
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function STDDEV(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
optDeviations?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * STOCH - Stochastic
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optFastK_Period=5] - Time period for building the Fast-K line
 * @param {number} [optSlowK_Period=3] - Smoothing for making the Slow-K line. Usually set to 3
 * @param {MATypes} [optSlowK_MA=SMA] - Type of Moving Average for Slow-K
 * @param {number} [optSlowD_Period=3] - Smoothing for making the Slow-D line
 * @param {MATypes} [optSlowD_MA=SMA] - Type of Moving Average for Slow-D
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outSlowK, outSlowD]
 */
export declare function STOCH(
inHigh: number[],
inLow: number[],
inClose: number[],
optFastK_Period?: number,
optSlowK_Period?: number,
optSlowK_MA?: MATypes,
optSlowD_Period?: number,
optSlowD_MA?: MATypes,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
 * STOCH - Stochastic
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optFastK_Period=5] - Time period for building the Fast-K line
 * @param {number} [optSlowK_Period=3] - Smoothing for making the Slow-K line. Usually set to 3
 * @param {MATypes} [optSlowK_MA=SMA] - Type of Moving Average for Slow-K
 * @param {number} [optSlowD_Period=3] - Smoothing for making the Slow-D line
 * @param {MATypes} [optSlowD_MA=SMA] - Type of Moving Average for Slow-D
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outSlowK, outSlowD]
 */
export declare function STOCH(
inRecords: Record[],
optFastK_Period?: number,
optSlowK_Period?: number,
optSlowK_MA?: MATypes,
optSlowD_Period?: number,
optSlowD_MA?: MATypes,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
 * STOCHF - Stochastic Fast
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optFastK_Period=5] - Time period for building the Fast-K line
 * @param {number} [optFastD_Period=3] - Smoothing for making the Fast-D line. Usually set to 3
 * @param {MATypes} [optFastD_MA=SMA] - Type of Moving Average for Fast-D
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outFastK, outFastD]
 */
export declare function STOCHF(
inHigh: number[],
inLow: number[],
inClose: number[],
optFastK_Period?: number,
optFastD_Period?: number,
optFastD_MA?: MATypes,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
 * STOCHF - Stochastic Fast
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optFastK_Period=5] - Time period for building the Fast-K line
 * @param {number} [optFastD_Period=3] - Smoothing for making the Fast-D line. Usually set to 3
 * @param {MATypes} [optFastD_MA=SMA] - Type of Moving Average for Fast-D
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outFastK, outFastD]
 */
export declare function STOCHF(
inRecords: Record[],
optFastK_Period?: number,
optFastD_Period?: number,
optFastD_MA?: MATypes,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
 * STOCHRSI - Stochastic Relative Strength Index
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [optFastK_Period=5] - Time period for building the Fast-K line
 * @param {number} [optFastD_Period=3] - Smoothing for making the Fast-D line. Usually set to 3
 * @param {MATypes} [optFastD_MA=SMA] - Type of Moving Average for Fast-D
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outFastK, outFastD]
 */
export declare function STOCHRSI(
inReal: number[],
optTime_Period?: number,
optFastK_Period?: number,
optFastD_Period?: number,
optFastD_MA?: MATypes,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
 * STOCHRSI - Stochastic Relative Strength Index
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [optFastK_Period=5] - Time period for building the Fast-K line
 * @param {number} [optFastD_Period=3] - Smoothing for making the Fast-D line. Usually set to 3
 * @param {MATypes} [optFastD_MA=SMA] - Type of Moving Average for Fast-D
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {[number[], number[]]} - [outFastK, outFastD]
 */
export declare function STOCHRSI(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
optFastK_Period?: number,
optFastD_Period?: number,
optFastD_MA?: MATypes,
startIdx?: number,
endIdx?: number
): [number[], number[]];
/**
 * SUB - Vector Arithmetic Subtraction
 *
 * @param {number[]} inReal0 - Double Array
 * @param {number[]} inReal1 - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SUB(
inReal0: number[],
inReal1: number[],
startIdx?: number,
endIdx?: number
): number[];
/**
 * SUB - Vector Arithmetic Subtraction
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inReal0Name - The field name to extract from `inRecords`
 * @param {string} inReal1Name - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SUB(
inRecords: Record[],
inReal0Name: string,
inReal1Name: string,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SUM - Summation
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SUM(
inReal: number[],
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * SUM - Summation
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function SUM(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * T3 - Triple Exponential Moving Average (T3)
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optVolume_Factor=7.000000e-1] - Volume Factor
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function T3(
inReal: number[],
optTime_Period?: number,
optVolume_Factor?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * T3 - Triple Exponential Moving Average (T3)
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optVolume_Factor=7.000000e-1] - Volume Factor
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function T3(
inRecords: Record[],
inRealName: string,
optTime_Period?: number,
optVolume_Factor?: number,
startIdx?: number,
endIdx?: number
): number[];
/**
 * TAN - Vector Trigonometric Tan
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TAN(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TAN - Vector Trigonometric Tan
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TAN(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TANH - Vector Trigonometric Tanh
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TANH(
  inReal: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TANH - Vector Trigonometric Tanh
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TANH(
  inRecords: Record[],
  inRealName: string,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TEMA - Triple Exponential Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TEMA(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TEMA - Triple Exponential Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TEMA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TRANGE - True Range
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TRANGE(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TRANGE - True Range
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TRANGE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TRIMA - Triangular Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TRIMA(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TRIMA - Triangular Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TRIMA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TRIX - 1-day Rate-Of-Change (ROC) of a Triple Smooth EMA
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TRIX(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TRIX - 1-day Rate-Of-Change (ROC) of a Triple Smooth EMA
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TRIX(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TSF - Time Series Forecast
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TSF(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TSF - Time Series Forecast
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TSF(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TYPPRICE - Typical Price
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TYPPRICE(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * TYPPRICE - Typical Price
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function TYPPRICE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * ULTOSC - Ultimate Oscillator
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optFirst_Period=7] - Number of bars for 1st period.
 * @param {number} [optSecond_Period=14] - Number of bars for 2nd period
 * @param {number} [optThird_Period=28] - Number of bars for 3rd period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ULTOSC(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optFirst_Period?: number,
  optSecond_Period?: number,
  optThird_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * ULTOSC - Ultimate Oscillator
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optFirst_Period=7] - Number of bars for 1st period.
 * @param {number} [optSecond_Period=14] - Number of bars for 2nd period
 * @param {number} [optThird_Period=28] - Number of bars for 3rd period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function ULTOSC(
  inRecords: Record[],
  optFirst_Period?: number,
  optSecond_Period?: number,
  optThird_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * VAR - Variance
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optDeviations=1.000000e+0] - Nb of deviations
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function VAR(
  inReal: number[],
  optTime_Period?: number,
  optDeviations?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * VAR - Variance
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=5] - Number of period
 * @param {number} [optDeviations=1.000000e+0] - Nb of deviations
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function VAR(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  optDeviations?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * WCLPRICE - Weighted Close Price
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function WCLPRICE(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * WCLPRICE - Weighted Close Price
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function WCLPRICE(
  inRecords: Record[],
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * WILLR - Williams' %R
 *
 * @param {number[]} inHigh - High
 * @param {number[]} inLow - Low
 * @param {number[]} inClose - Close
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function WILLR(
  inHigh: number[],
  inLow: number[],
  inClose: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * WILLR - Williams' %R
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {number} [optTime_Period=14] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function WILLR(
  inRecords: Record[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * WMA - Weighted Moving Average
 *
 * @param {number[]} inReal - Double Array
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength-1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function WMA(
  inReal: number[],
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
/**
 * WMA - Weighted Moving Average
 *
 * @param {Record[]} inRecords - The records to extract data
 * @param {string} inRealName - The field name to extract from `inRecords`
 * @param {number} [optTime_Period=30] - Number of period
 * @param {number} [startIdx=0] - The start index to process
 * @param {number} [endIdx=inLength - 1] - The end index to process, please note that the value is included, default is the input records length - 1
 * @returns {number[]} - outReal (Double Array)
 */
export declare function WMA(
  inRecords: Record[],
  inRealName: string,
  optTime_Period?: number,
  startIdx?: number,
  endIdx?: number
): number[];
import {
command,
CommandFunction,
CreateExtensionPlugin,
EditorState,
EditorView,
extension,
FromToProps,
Handler,
hasTransactionChanged,
Helper,
helper,
invariant,
isDomNode,
isEmptyArray,
isEqual,
isNumber,
isString,
PlainExtension,
Static,
Transaction,
} from '@remirror/core';
import { Mapping, StepMap } from '@remirror/pm/transform';
import { Decoration, DecorationSet } from '@remirror/pm/view';
import { Commit, Span, TrackState } from './diff-utils';
export interface DiffOptions {
  /**
   * The CSS class applied to decorations that mark a highlighted commit span.
   *
   * @default 'blame-marker'
   */
  blameMarkerClass?: Static<string>;
  /**
   * Builds the commit message recorded when a commit is reverted.
   *
   * @default `(message: string) => "Revert: '" + message + "'"`
   */
  revertMessage?: (message: string) => string;
  /**
   * A handler that is called whenever a tracked change is hovered over in the
   * editor.
   */
  onMouseOverCommit?: Handler<(props: HandlerProps) => void>;
  /**
   * A handler that is called whenever a tracked change that was being hovered
   * is no longer hovered.
   */
  onMouseLeaveCommit?: Handler<(props: HandlerProps) => void>;
  /**
   * Called when the commit is part of the current text selection. Called with
   * an array of possible selections.
   */
  onSelectCommits?: Handler<
    (selections: HandlerProps[], previousSelections?: HandlerProps[]) => void
  >;
  /**
   * Called when commits are deselected.
   */
  onDeselectCommits?: Handler<(selections: HandlerProps[]) => void>;
}
/**
 * An extension that tracks document changes as a series of named commits,
 * supports highlighting and reverting individual commits, and emits handler
 * events when tracked spans are hovered or selected.
 */
@extension<DiffOptions>({
  defaultOptions: {
    blameMarkerClass: 'blame-marker',
    revertMessage: (message: string) => `Revert: '${message}'`,
  },
  staticKeys: ['blameMarkerClass'],
  handlerKeys: ['onMouseOverCommit', 'onMouseLeaveCommit', 'onSelectCommits', 'onDeselectCommits'],
})
export class DiffExtension extends PlainExtension<DiffOptions> {
  get name() {
    return 'diff' as const;
  }

  /** The tracked span/commit currently under the pointer, if any. */
  private hovered?: HandlerProps;

  /** The tracked spans covered by the current selection, if any. */
  private selections?: HandlerProps[];

  /**
   * Create the custom change tracking plugin.
   *
   * This has been adapted from the prosemirror website demo.
   * https://github.com/ProseMirror/website/blob/master/example/track/index.js
   */
  createPlugin(): CreateExtensionPlugin {
    return {
      state: {
        init: (_, state) => {
          return this.createInitialState(state);
        },
        apply: (tr, pluginState: DiffPluginState, _: EditorState, state: EditorState) => {
          const newState = this.applyStateUpdates(tr, pluginState, state);
          this.handleSelection(tr, newState);
          return newState;
        },
      },
      props: {
        decorations: (state) => {
          return this.getPluginState<DiffPluginState>(state).decorations;
        },
        handleDOMEvents: {
          mouseover: (view, event) => {
            return this.handleMouseOver(view, event);
          },
          mouseleave: (view, event) => {
            return this.handleMouseLeave(view, event);
          },
        },
      },
    };
  }

  /**
   * Highlight the provided commit.
   */
  @command()
  highlightCommit(commit: Commit | CommitId): CommandFunction {
    return (props) => {
      const { tr, dispatch } = props;

      // Resolve 'first' | 'last' keywords to a numeric index.
      if (isString(commit)) {
        commit = this.getIndexByName(commit);
      }

      // Resolve a Commit object to its index in the tracked history.
      if (!isNumber(commit)) {
        commit = this.getCommitId(commit);
      }

      if (dispatch) {
        dispatch(this.setMeta(tr, { add: commit }));
      }

      return true;
    };
  }

  /**
   * Remove the highlight from the commit.
   */
  @command()
  removeHighlightedCommit(commit: Commit | CommitId): CommandFunction {
    return (props) => {
      const { tr, dispatch } = props;

      if (isString(commit)) {
        commit = this.getIndexByName(commit);
      }

      if (!isNumber(commit)) {
        commit = this.getCommitId(commit);
      }

      if (dispatch) {
        dispatch(this.setMeta(tr, { clear: commit }));
      }

      return true;
    };
  }

  /**
   * Add a commit to the transaction history.
   */
  @command()
  commitChange(message: string): CommandFunction {
    return (props) => {
      const { tr, dispatch } = props;

      if (dispatch) {
        dispatch(this.setMeta(tr, { message }));
      }

      return true;
    };
  }

  /**
   * Revert a commit which was added to the transaction history.
   */
  @command()
  revertCommit(commit?: Commit): CommandFunction {
    return (props) => {
      const { state, tr, dispatch } = props;

      if (!commit) {
        commit = this.getCommit('last');
      }

      const { tracked } = this.getPluginState<DiffPluginState>(state);
      const index = tracked.commits.indexOf(commit);

      // If this commit is not in the history, we can't revert it
      if (index === -1) {
        return false;
      }

      // Reverting is only possible if there are no uncommitted changes
      if (!isEmptyArray(tracked.uncommittedSteps)) {
        // return alert('Commit your changes first!');
        return false; // TODO add a handler here.
      }

      if (!dispatch) {
        return true;
      }

      // Collect the step maps of this commit and every commit after it.
      const commitMaps: StepMap[] = [];

      for (const laterCommit of tracked.commits.slice(index)) {
        commitMaps.push(...laterCommit.maps);
      }

      // This is the mapping from the document as it was at the start of
      // the commit to the current document.
      const remap = new Mapping(commitMaps);

      // Build up a transaction that includes all (inverted) steps in this
      // commit, rebased to the current document. They have to be applied
      // in reverse order.
      for (let stepIndex = commit.steps.length - 1; stepIndex >= 0; stepIndex--) {
        // The mapping is sliced to not include maps for this step and the
        // ones before it.
        const remapped = commit.steps[stepIndex]?.map(remap.slice(stepIndex + 1));

        if (!remapped) {
          continue;
        }

        const result = tr.maybeStep(remapped);

        // If the step can be applied, add its map to our mapping
        // pipeline, so that subsequent steps are mapped over it.
        if (result.doc) {
          remap.appendMap(remapped.getMap(), stepIndex);
        }
      }

      // Add a commit message and dispatch.
      if (tr.docChanged) {
        this.setMeta(tr, { message: this.options.revertMessage(commit.message) });
        dispatch(tr);
      }

      return true;
    };
  }

  /**
   * Get the full list of tracked commit changes
   */
  @helper()
  getCommits(): Helper<Commit[]> {
    return this.getPluginState<DiffPluginState>().tracked.commits;
  }

  /** Resolve the 'first' | 'last' keyword to a numeric commit index. */
  private getIndexByName(name: 'first' | 'last') {
    const length = this.getPluginState<DiffPluginState>().tracked.commits.length;

    switch (name) {
      case 'first':
        return 0;

      default:
        return length - 1;
    }
  }

  /**
   * Get the commit by it's index
   */
  @helper()
  getCommit(id: CommitId): Helper<Commit> {
    const commits = this.getPluginState<DiffPluginState>().tracked.commits;
    const commit = isString(id) ? commits[this.getIndexByName(id)] : commits[id];
    invariant(commit, {});

    return commit;
  }

  /** Find the index of a commit in the tracked commit history. */
  private getCommitId(commit: Commit) {
    const { tracked } = this.getPluginState<DiffPluginState>();
    return tracked.commits.indexOf(commit);
  }

  /**
   * Get the meta data for this custom plugin.
   */
  private getMeta(tr: Transaction): DiffMeta {
    return tr.getMeta(this.pluginKey) ?? {};
  }

  /**
   * Set the meta data for the plugin.
   */
  private setMeta(tr: Transaction, meta: DiffMeta): Transaction {
    tr.setMeta(this.pluginKey, { ...this.getMeta(tr), ...meta });
    return tr;
  }

  /**
   * Calls the selection handlers when the selection changes the number of
   * commit spans covered.
   */
  private handleSelection(tr: Transaction, pluginState: DiffPluginState) {
    if (!hasTransactionChanged(tr)) {
      return;
    }

    const { from, to } = tr.selection;
    const { blameMap, commits } = pluginState.tracked;
    const selections: HandlerProps[] = [];

    for (const map of blameMap) {
      const selectionIncludesSpan =
        (map.from <= from && map.to >= from) || (map.from <= to && map.to >= to);

      if (!selectionIncludesSpan || !isNumber(map.commit) || map.commit >= commits.length) {
        continue;
      }

      selections.push({ commit: this.getCommit(map.commit), from: map.from, to: map.to });
    }

    const selectionHasCommit = selections.length > 0;

    if (selectionHasCommit && !isEqual(selections, this.selections)) {
      this.options.onSelectCommits(selections, this.selections);
      this.selections = selections;

      return;
    }

    if (this.selections) {
      this.options.onDeselectCommits(this.selections);
      this.selections = undefined;
    }
  }

  /**
   * Transform the view and event into a commit and span.
   */
  private getHandlerPropsFromEvent(view: EditorView, event: Event): HandlerProps | undefined {
    if (!isDomNode(event.target)) {
      return;
    }

    const pos = view.posAtDOM(event.target, 0);
    const { tracked } = this.getPluginState<DiffPluginState>();
    const span = tracked.blameMap.find((map) => map.from <= pos && map.to >= pos);

    if (!span || !isNumber(span.commit)) {
      return;
    }

    return { commit: this.getCommit(span.commit), from: span.from, to: span.to };
  }

  /**
   * Capture the mouseover event and trigger the `onMouseOverCommit` handler
   * when it is captured.
   *
   * NOTE: renamed from `handlerMouseOver` for consistency with
   * `handleMouseLeave`.
   */
  private handleMouseOver(view: EditorView, event: Event) {
    const props = this.getHandlerPropsFromEvent(view, event);

    if (props) {
      this.hovered = props;
      this.options.onMouseOverCommit(props);
    }

    return false;
  }

  /**
   * Capture the mouseleave event and trigger the `onMouseLeaveCommit` handler.
   */
  private handleMouseLeave(view: EditorView, event: Event) {
    if (!this.hovered) {
      return false;
    }

    const commit = this.getHandlerPropsFromEvent(view, event);

    if (commit) {
      this.hovered = undefined;
      this.options.onMouseLeaveCommit(commit);
    }

    return false;
  }

  /**
   * Create the initial plugin state for the custom plugin.
   */
  private createInitialState(state: EditorState): DiffPluginState {
    return {
      tracked: new TrackState({
        blameMap: [new Span({ from: 0, to: state.doc.content.size, commit: undefined })],
        commits: [],
        uncommittedMaps: [],
        uncommittedSteps: [],
      }),
      decorations: DecorationSet.empty,
      // Start with an explicit empty highlight list. Previously `commits`
      // was left undefined, which made `updateHighlights` silently drop the
      // very first `highlightCommit` dispatch.
      commits: [],
    };
  }

  /**
   * Apply state updates in response to document changes.
   */
  private applyStateUpdates(
    tr: Transaction,
    pluginState: DiffPluginState,
    state: EditorState,
  ): DiffPluginState {
    return {
      ...this.updateTracked(tr, pluginState),
      ...this.updateHighlights(tr, pluginState, state),
    };
  }

  /** Build inline decorations for every blame span belonging to `commits`. */
  private createDecorationSet(
    commits: number[],
    pluginState: DiffPluginState,
    state: EditorState,
  ): DecorationSet {
    const { tracked } = pluginState;
    const decorations: Decoration[] = [];

    for (const { commit, from, to } of tracked.blameMap) {
      if (!isNumber(commit) || !commits.includes(commit)) {
        continue;
      }

      decorations.push(Decoration.inline(from, to, { class: this.options.blameMarkerClass }));
    }

    return DecorationSet.create(state.doc, decorations);
  }

  /**
   * Apply updates to the highlight decorations.
   */
  private updateHighlights(
    tr: Transaction,
    pluginState: DiffPluginState,
    state: EditorState,
  ): HighlightStateProps {
    const { add, clear } = this.getMeta(tr);

    // Normalize so a legacy/undefined commit list never suppresses the first
    // `add` request (bug fix: the old guard `pluginState.commits && …`
    // skipped the branch entirely when `commits` was undefined).
    const active = pluginState.commits ?? [];

    if (isNumber(add) && !active.includes(add)) {
      const commits = [...active, add];
      const decorations = this.createDecorationSet(commits, pluginState, state);

      return { decorations, commits };
    }

    if (isNumber(clear) && active.includes(clear)) {
      const commits = active.filter((commit) => commit !== clear);
      const decorations = this.createDecorationSet(commits, pluginState, state);

      return { decorations, commits };
    }

    if (tr.docChanged && !isEmptyArray(active)) {
      return {
        decorations: pluginState.decorations.map(tr.mapping, tr.doc),
        commits: active,
      };
    }

    return { decorations: pluginState.decorations, commits: active };
  }

  /**
   * Apply updates for the commit tracker.
   *
   * Please note this isn't able to track marks and diffs. It can only
   * track changes to content.
   */
  private updateTracked(tr: Transaction, state: TrackedStateProps): TrackedStateProps {
    let { tracked } = state;

    if (tr.docChanged) {
      tracked = tracked.applyTransform(tr);
    }

    const { message } = this.getMeta(tr);

    if (message) {
      tracked = tracked.applyCommit(message, tr.time);
    }

    return { tracked };
  }
}
interface TrackedStateProps {
  /**
   * The tracked state.
   */
  tracked: TrackState;
}
interface HighlightStateProps {
  /**
   * The decorations for highlighted commits.
   */
  decorations: DecorationSet;
  /**
   * The id's of the commits to be highlighted.
   */
  commits?: number[];
}
/** The combined plugin state: tracked commits plus highlight decorations. */
export interface DiffPluginState extends TrackedStateProps, HighlightStateProps {}
/** Metadata attached to a transaction to drive the diff plugin. */
interface DiffMeta {
  /** Commit message; when present, uncommitted steps are committed. */
  message?: string;
  /** Id of a commit whose spans should be highlighted. */
  add?: number;
  /** Id of a commit whose highlight should be removed. */
  clear?: number;
}
/** A commit referenced by numeric index or by the 'first' / 'last' keyword. */
type CommitId = number | 'first' | 'last';
export interface HandlerProps extends FromToProps {
  /**
   * The commit.
   */
  commit: Commit;
}
declare global {
  namespace Remirror {
    interface AllExtensions {
      // Register this extension in remirror's global extension map.
      diff: DiffExtension;
    }
  }
}
import * as pulumi from "@pulumi/pulumi";
import * as utilities from "../utilities";
/**
* Manages a Stream Analytics Output Table.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as azure from "@pulumi/azure";
*
* const exampleResourceGroup = azure.core.getResourceGroup({
* name: "example-resources",
* });
* const exampleJob = azure.streamanalytics.getJob({
* name: "example-job",
* resourceGroupName: azurerm_resource_group.example.name,
* });
* const exampleAccount = new azure.storage.Account("exampleAccount", {
* resourceGroupName: exampleResourceGroup.then(exampleResourceGroup => exampleResourceGroup.name),
* location: exampleResourceGroup.then(exampleResourceGroup => exampleResourceGroup.location),
* accountTier: "Standard",
* accountReplicationType: "LRS",
* });
* const exampleTable = new azure.storage.Table("exampleTable", {storageAccountName: exampleAccount.name});
* const exampleOutputTable = new azure.streamanalytics.OutputTable("exampleOutputTable", {
* streamAnalyticsJobName: azurerm_stream_analytics_job.example.name,
* resourceGroupName: azurerm_stream_analytics_job.example.resource_group_name,
* storageAccountName: exampleAccount.name,
* storageAccountKey: exampleAccount.primaryAccessKey,
* table: exampleTable.name,
* partitionKey: "foo",
* rowKey: "bar",
* batchSize: 100,
* });
* ```
*
* ## Import
*
* Stream Analytics Output to Table can be imported using the `resource id`, e.g.
*
* ```sh
* $ pulumi import azure:streamanalytics/outputTable:OutputTable example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/outputs/output1
* ```
*/
export class OutputTable extends pulumi.CustomResource {
    /**
     * Get an existing OutputTable resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: OutputTableState, opts?: pulumi.CustomResourceOptions): OutputTable {
        return new OutputTable(name, <any>state, { ...opts, id: id });
    }

    /** @internal */
    public static readonly __pulumiType = 'azure:streamanalytics/outputTable:OutputTable';

    /**
     * Returns true if the given object is an instance of OutputTable. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is OutputTable {
        if (obj === undefined || obj === null) {
            return false;
        }
        return obj['__pulumiType'] === OutputTable.__pulumiType;
    }

    /**
     * The number of records for a batch operation. Must be between `1` and `100`.
     */
    public readonly batchSize!: pulumi.Output<number>;
    /**
     * The name of the Stream Output. Changing this forces a new resource to be created.
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * The name of the output column that contains the partition key.
     */
    public readonly partitionKey!: pulumi.Output<string>;
    /**
     * The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
     */
    public readonly resourceGroupName!: pulumi.Output<string>;
    /**
     * The name of the output column that contains the row key.
     */
    public readonly rowKey!: pulumi.Output<string>;
    /**
     * The Access Key which should be used to connect to this Storage Account.
     */
    public readonly storageAccountKey!: pulumi.Output<string>;
    /**
     * The name of the Storage Account.
     */
    public readonly storageAccountName!: pulumi.Output<string>;
    /**
     * The name of the Stream Analytics Job. Changing this forces a new resource to be created.
     */
    public readonly streamAnalyticsJobName!: pulumi.Output<string>;
    /**
     * The name of the table where the stream should be output to.
     */
    public readonly table!: pulumi.Output<string>;

    /**
     * Create a OutputTable resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: OutputTableArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: OutputTableArgs | OutputTableState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts || {};
        // All of this resource's input properties, in declaration order.
        // Every property except `name` is required when creating the resource.
        const propertyNames = [
            "batchSize",
            "name",
            "partitionKey",
            "resourceGroupName",
            "rowKey",
            "storageAccountKey",
            "storageAccountName",
            "streamAnalyticsJobName",
            "table",
        ] as const;
        if (opts.id) {
            // Looking up an existing resource: copy state (if any) verbatim.
            const state = argsOrState as OutputTableState | undefined;
            for (const key of propertyNames) {
                inputs[key] = state ? state[key] : undefined;
            }
        } else {
            const args = argsOrState as OutputTableArgs | undefined;
            // Validate required properties (all except `name`) before populating
            // the inputs, preserving the original check order and messages.
            for (const key of propertyNames) {
                if (key === "name") {
                    continue;
                }
                if ((!args || args[key] === undefined) && !opts.urn) {
                    throw new Error(`Missing required property '${key}'`);
                }
            }
            for (const key of propertyNames) {
                inputs[key] = args ? args[key] : undefined;
            }
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(OutputTable.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering OutputTable resources.
 * Every field is optional: when reading existing state, any subset may be set.
 */
export interface OutputTableState {
    /**
     * The number of records for a batch operation. Must be between `1` and `100`.
     */
    batchSize?: pulumi.Input<number>;
    /**
     * The name of the Stream Output. Changing this forces a new resource to be created.
     */
    name?: pulumi.Input<string>;
    /**
     * The name of the output column that contains the partition key.
     */
    partitionKey?: pulumi.Input<string>;
    /**
     * The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
     */
    resourceGroupName?: pulumi.Input<string>;
    /**
     * The name of the output column that contains the row key.
     */
    rowKey?: pulumi.Input<string>;
    /**
     * The Access Key which should be used to connect to this Storage Account.
     * NOTE(review): credential value — presumably sensitive; confirm it is not logged.
     */
    storageAccountKey?: pulumi.Input<string>;
    /**
     * The name of the Storage Account.
     */
    storageAccountName?: pulumi.Input<string>;
    /**
     * The name of the Stream Analytics Job. Changing this forces a new resource to be created.
     */
    streamAnalyticsJobName?: pulumi.Input<string>;
    /**
     * The name of the table where the stream should be output to.
     */
    table?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a OutputTable resource.
 * All fields except `name` are required (validated in the constructor).
 */
export interface OutputTableArgs {
    /**
     * The number of records for a batch operation. Must be between `1` and `100`.
     */
    batchSize: pulumi.Input<number>;
    /**
     * The name of the Stream Output. Changing this forces a new resource to be created.
     */
    name?: pulumi.Input<string>;
    /**
     * The name of the output column that contains the partition key.
     */
    partitionKey: pulumi.Input<string>;
    /**
     * The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
     */
    resourceGroupName: pulumi.Input<string>;
    /**
     * The name of the output column that contains the row key.
     */
    rowKey: pulumi.Input<string>;
    /**
     * The Access Key which should be used to connect to this Storage Account.
     * NOTE(review): credential value — presumably sensitive; confirm it is not logged.
     */
    storageAccountKey: pulumi.Input<string>;
    /**
     * The name of the Storage Account.
     */
    storageAccountName: pulumi.Input<string>;
    /**
     * The name of the Stream Analytics Job. Changing this forces a new resource to be created.
     */
    streamAnalyticsJobName: pulumi.Input<string>;
    /**
     * The name of the table where the stream should be output to.
     */
    table: pulumi.Input<string>;
}
/** A triple of normalized color components ([R, G, B]). */
export type Vec3 = [number, number, number];
/** A 3-component texture coordinate vector, used by the -o/-s/-t map options. */
export interface UVW {
    u: number;
    v: number;
    w: number;
}
/**
 * The parsed options of a single MTL texture-map statement
 * (map_Kd, map_Ka, bump, disp, decal, refl, ...). Each field maps to one
 * option flag, noted below; defaults come from emptyTextureOptions().
 */
export interface TextureMapData {
    colorCorrection: boolean; // -cc
    horizontalBlending: boolean; // -blendu
    verticalBlending: boolean; // -blendv
    boostMipMapSharpness: number; // -boost
    modifyTextureMap: {
        // -mm <brightness> <contrast>
        brightness: number;
        contrast: number;
    };
    offset: UVW; // -o
    scale: UVW; // -s
    turbulence: UVW; // -t
    clamp: boolean; // -clamp
    textureResolution: number | null; // -texres
    bumpMultiplier: number; // -bm
    imfChan: string | null; // -imfchan
    filename: string; // the map file path, backslashes normalized to '/'
    reflectionType?: string; // -type (refl maps only)
    texture?: HTMLImageElement; // not set by this parser; filled in later by consumers
}
/**
 * The Material class.
 *
 * Holds every attribute a single MTL material can declare. The reflectivity
 * attributes (ambient, diffuse, specular, ...) are [R, G, B] arrays of
 * normalized values; each map* property holds the parsed options of the
 * corresponding texture-map directive.
 */
export class Material {
    // Ka - Ambient Reflectivity
    ambient: Vec3;
    // Kd - Diffuse Reflectivity
    diffuse: Vec3;
    // Ks - Specular Reflectivity
    specular: Vec3;
    // Ke - Emissive
    emissive: Vec3;
    // Tf - Transmission Filter
    transmissionFilter: Vec3;
    // d
    dissolve: number;
    // Ns - valid range is between 0 and 1000
    specularExponent: number;
    // either d or Tr; valid values are normalized
    transparency: number;
    // illum - the enum of the illumination model to use
    illumination: number;
    // Ni - set to "normal" (air)
    refractionIndex: number;
    // sharpness
    sharpness: number;
    // map_Kd
    mapDiffuse: TextureMapData;
    // map_Ka
    mapAmbient: TextureMapData;
    // map_Ks
    mapSpecular: TextureMapData;
    // map_Ns
    mapSpecularExponent: TextureMapData;
    // map_d
    mapDissolve: TextureMapData;
    // map_aat
    antiAliasing: boolean;
    // map_bump or bump
    mapBump: TextureMapData;
    // disp
    mapDisplacement: TextureMapData;
    // decal
    mapDecal: TextureMapData;
    // map_Ke
    mapEmissive: TextureMapData;
    // refl - one entry per refl statement: a cube reflection emits one per
    // cube face, a spherical reflection should only ever emit one.
    mapReflections: TextureMapData[];

    /**
     * Constructor
     * @param name the unique name of the material
     */
    constructor(public name: string) {
        this.ambient = [0, 0, 0];
        this.diffuse = [0, 0, 0];
        this.specular = [0, 0, 0];
        this.emissive = [0, 0, 0];
        this.transmissionFilter = [0, 0, 0];
        this.dissolve = 0;
        this.specularExponent = 0;
        this.transparency = 0;
        this.illumination = 0;
        this.refractionIndex = 1;
        this.sharpness = 0;
        this.mapDiffuse = emptyTextureOptions();
        this.mapAmbient = emptyTextureOptions();
        this.mapSpecular = emptyTextureOptions();
        this.mapSpecularExponent = emptyTextureOptions();
        this.mapDissolve = emptyTextureOptions();
        this.antiAliasing = false;
        this.mapBump = emptyTextureOptions();
        this.mapDisplacement = emptyTextureOptions();
        this.mapDecal = emptyTextureOptions();
        this.mapEmissive = emptyTextureOptions();
        this.mapReflections = [];
    }
}
// Placeholder assigned to MaterialLibrary.currentMaterial before the first
// `newmtl` directive is seen and after parsing completes.
const SENTINEL_MATERIAL = new Material("sentinel");
/**
 * Parses the contents of a Wavefront MTL file into `Material` instances,
 * registered on `this.materials` under their `newmtl` names.
 *
 * https://en.wikipedia.org/wiki/Wavefront_.obj_file
 * http://paulbourke.net/dataformats/mtl/
 */
export class MaterialLibrary {
    // The material currently being populated by parsed directives.
    public currentMaterial: Material = SENTINEL_MATERIAL;
    // All parsed materials, keyed by their `newmtl` name.
    public materials: { [k: string]: Material } = {};
    /**
     * Constructs the Material Parser and parses the data immediately.
     * @param data the MTL file contents
     */
    constructor(public data: string) {
        this.parse();
    }
    /* eslint-disable camelcase */
    /* the function names here disobey camelCase conventions
    to make parsing/routing easier. see the parse function
    documentation for more information. */
    /**
     * Creates a new Material object and adds it to the registry.
     * @param tokens the tokens associated with the directive
     */
    parse_newmtl(tokens: string[]) {
        const name = tokens[0];
        this.currentMaterial = new Material(name);
        this.materials[name] = this.currentMaterial;
    }
    /**
     * Given a list of possible color tokens, returns an array of R, G, and B
     * color values. Spectral curve files and CIEXYZ colors are rejected.
     *
     * @param tokens the tokens associated with the directive
     * @return a 3 element array containing the R, G, and B values
     *         of the color
     */
    parseColor(tokens: string[]): Vec3 {
        if (tokens[0] == "spectral") {
            throw new Error(
                "The MTL parser does not support spectral curve files. You will " +
                    "need to convert the MTL colors to either RGB or CIEXYZ.",
            );
        }
        if (tokens[0] == "xyz") {
            throw new Error(
                "The MTL parser does not currently support XYZ colors. Either convert the " +
                    "XYZ values to RGB or create an issue to add support for XYZ",
            );
        }
        // from my understanding of the spec, RGB values at this point
        // will either be 3 floats or exactly 1 float, so that's the check
        // performed here
        if (tokens.length == 3) {
            const [x, y, z] = tokens;
            return [parseFloat(x), parseFloat(y), parseFloat(z)];
        }
        // tokens does NOT have a length of 3 here, so assume it's exactly 1
        // (a single value applies to all three channels per the spec).
        const value = parseFloat(tokens[0]);
        return [value, value, value];
    }
    /**
     * Ka - ambient reflectivity.
     *
     * A Ka directive takes one of three mutually exclusive forms:
     * "Ka r g b" (only "r" is mandatory; missing components copy "r"),
     * "Ka spectral file.rfl [factor]" or "Ka xyz x y z". Only the RGB form
     * is supported here; values are normally in the range 0.0 to 1.0.
     *
     * @param tokens the tokens associated with the directive
     */
    parse_Ka(tokens: string[]) {
        this.currentMaterial.ambient = this.parseColor(tokens);
    }
    /**
     * Kd - diffuse reflectivity. Same forms and rules as Ka.
     * @param tokens the tokens associated with the directive
     */
    parse_Kd(tokens: string[]) {
        this.currentMaterial.diffuse = this.parseColor(tokens);
    }
    /**
     * Ks - specular reflectivity. Same forms and rules as Ka.
     * @param tokens the tokens associated with the directive
     */
    parse_Ks(tokens: string[]) {
        this.currentMaterial.specular = this.parseColor(tokens);
    }
    /**
     * Ke - the amount and color of light emitted by the object.
     * @param tokens the tokens associated with the directive
     */
    parse_Ke(tokens: string[]) {
        this.currentMaterial.emissive = this.parseColor(tokens);
    }
    /**
     * Tf - transmission filter: only the listed colors pass through the
     * material. For example "Tf 0 1 0" lets all green pass and filters out
     * all red and blue. Same forms and rules as Ka.
     *
     * @param tokens the tokens associated with the directive
     */
    parse_Tf(tokens: string[]) {
        this.currentMaterial.transmissionFilter = this.parseColor(tokens);
    }
    /**
     * d - dissolve: "d [-halo] factor" where a factor of 1.0 is fully opaque
     * and 0.0 is fully dissolved (completely transparent).
     *
     * @param tokens the tokens associated with the directive
     */
    parse_d(tokens: string[]) {
        // Taking the last token skips the optional -halo flag, which is
        // deliberately unsupported (no reliable documentation on it).
        this.currentMaterial.dissolve = parseFloat(tokens.pop() || "0");
    }
    /**
     * illum - the illumination model to use, an integer from 0 to 10:
     *   0. Color on and Ambient off
     *   1. Color on and Ambient on
     *   2. Highlight on
     *   3. Reflection on and Ray trace on
     *   4. Transparency: Glass on, Reflection: Ray trace on
     *   5. Reflection: Fresnel on and Ray trace on
     *   6. Transparency: Refraction on, Reflection: Fresnel off and Ray trace on
     *   7. Transparency: Refraction on, Reflection: Fresnel on and Ray trace on
     *   8. Reflection on and Ray trace off
     *   9. Transparency: Glass on, Reflection: Ray trace off
     *   10. Casts shadows onto invisible surfaces
     *
     * @param tokens the tokens associated with the directive
     */
    parse_illum(tokens: string[]) {
        this.currentMaterial.illumination = parseInt(tokens[0], 10);
    }
    /**
     * Ni - optical density (AKA index of refraction), from 0.001 to 10.
     * 1.0 means light does not bend as it passes through the object; glass
     * is about 1.5. Values below 1.0 produce bizarre results.
     *
     * @param tokens the tokens associated with the directive
     */
    parse_Ni(tokens: string[]) {
        this.currentMaterial.refractionIndex = parseFloat(tokens[0]);
    }
    /**
     * Ns - specular exponent, normally 0 to 1000. A high exponent results in
     * a tight, concentrated highlight.
     *
     * @param tokens the tokens associated with the directive
     */
    parse_Ns(tokens: string[]) {
        // Exporters commonly emit fractional exponents (e.g. "Ns 96.078431"),
        // so parse as a float rather than truncating with parseInt.
        this.currentMaterial.specularExponent = parseFloat(tokens[0]);
    }
    /**
     * sharpness - sharpness of reflections from the local reflection map
     * (or the global one if the material defines none), 0 to 1000, default
     * 60. Values greater than 100 can introduce aliasing on flat surfaces
     * viewed at a sharp angle.
     *
     * @param tokens the tokens associated with the directive
     */
    parse_sharpness(tokens: string[]) {
        this.currentMaterial.sharpness = parseInt(tokens[0], 10);
    }
    /**
     * Parses the -cc (color correction) flag.
     *
     * @param values the values passed to the -cc flag
     * @param options the Object of all image options
     */
    parse_cc(values: string[], options: TextureMapData) {
        options.colorCorrection = values[0] == "on";
    }
    /**
     * Parses the -blendu (horizontal blending) flag.
     *
     * @param values the values passed to the -blendu flag
     * @param options the Object of all image options
     */
    parse_blendu(values: string[], options: TextureMapData) {
        options.horizontalBlending = values[0] == "on";
    }
    /**
     * Parses the -blendv (vertical blending) flag.
     *
     * @param values the values passed to the -blendv flag
     * @param options the Object of all image options
     */
    parse_blendv(values: string[], options: TextureMapData) {
        options.verticalBlending = values[0] == "on";
    }
    /**
     * Parses the -boost (mip-map sharpness boost) flag.
     *
     * @param values the values passed to the -boost flag
     * @param options the Object of all image options
     */
    parse_boost(values: string[], options: TextureMapData) {
        options.boostMipMapSharpness = parseFloat(values[0]);
    }
    /**
     * Parses the -mm (texture brightness/contrast modification) flag.
     *
     * @param values the values passed to the -mm flag
     * @param options the Object of all image options
     */
    parse_mm(values: string[], options: TextureMapData) {
        options.modifyTextureMap.brightness = parseFloat(values[0]);
        options.modifyTextureMap.contrast = parseFloat(values[1]);
    }
    /**
     * Parses and sets the -o, -s, and -t u, v, and w values.
     * Missing components fall back to the option's default value.
     *
     * @param values the values passed to the -o, -s, -t flag
     * @param option the UVW object of the -o, -s or -t option
     * @param defaultValue the default for any unspecified component
     */
    parse_ost(values: string[], option: UVW, defaultValue: number) {
        while (values.length < 3) {
            values.push(defaultValue.toString());
        }
        option.u = parseFloat(values[0]);
        option.v = parseFloat(values[1]);
        option.w = parseFloat(values[2]);
    }
    /**
     * Parses the -o (offset) flag; components default to 0.
     *
     * @param values the values passed to the -o flag
     * @param options the Object of all image options
     */
    parse_o(values: string[], options: TextureMapData) {
        this.parse_ost(values, options.offset, 0);
    }
    /**
     * Parses the -s (scale) flag; components default to 1.
     *
     * @param values the values passed to the -s flag
     * @param options the Object of all image options
     */
    parse_s(values: string[], options: TextureMapData) {
        this.parse_ost(values, options.scale, 1);
    }
    /**
     * Parses the -t (turbulence) flag; components default to 0.
     *
     * @param values the values passed to the -t flag
     * @param options the Object of all image options
     */
    parse_t(values: string[], options: TextureMapData) {
        this.parse_ost(values, options.turbulence, 0);
    }
    /**
     * Parses the -texres (texture resolution) flag.
     *
     * @param values the values passed to the -texres flag
     * @param options the Object of all image options
     */
    parse_texres(values: string[], options: TextureMapData) {
        options.textureResolution = parseFloat(values[0]);
    }
    /**
     * Parses the -clamp flag.
     *
     * @param values the values passed to the -clamp flag
     * @param options the Object of all image options
     */
    parse_clamp(values: string[], options: TextureMapData) {
        options.clamp = values[0] == "on";
    }
    /**
     * Parses the -bm (bump multiplier) flag.
     *
     * @param values the values passed to the -bm flag
     * @param options the Object of all image options
     */
    parse_bm(values: string[], options: TextureMapData) {
        options.bumpMultiplier = parseFloat(values[0]);
    }
    /**
     * Parses the -imfchan (image file channel) flag.
     *
     * @param values the values passed to the -imfchan flag
     * @param options the Object of all image options
     */
    parse_imfchan(values: string[], options: TextureMapData) {
        options.imfChan = values[0];
    }
    /**
     * This only exists for reflection maps and denotes the type of reflection.
     *
     * @param values the values passed to the -type flag
     * @param options the Object of all image options
     */
    parse_type(values: string[], options: TextureMapData) {
        options.reflectionType = values[0];
    }
    /**
     * Parses a texture directive's option tokens and returns an options
     * object with the info. Tokens are grouped into "-flag value..." runs and
     * each run is routed to the matching parse_<flag> method, which mutates
     * the options object in place. Unknown flags are silently ignored.
     *
     * @param tokens all of the option tokens to pass to the texture
     * @return a complete object of options to apply to the texture
     */
    parseOptions(tokens: string[]): TextureMapData {
        const options = emptyTextureOptions();
        const optionsToValues: { [k: string]: string[] } = {};
        let option: string | undefined;
        tokens.reverse();
        while (tokens.length) {
            // token is guaranteed to exist here, hence the explicit "as"
            const token = tokens.pop() as string;
            if (token.startsWith("-")) {
                option = token.substring(1);
                optionsToValues[option] = [];
            } else if (option) {
                optionsToValues[option].push(token);
            }
        }
        for (const [name, values] of Object.entries(optionsToValues)) {
            const optionMethod = (this as any)[`parse_${name}`];
            if (optionMethod) {
                optionMethod.bind(this)(values, options);
            }
        }
        return options;
    }
    /**
     * Parses the given texture map line.
     *
     * @param tokens all of the tokens representing the texture
     * @return a complete object of options to apply to the texture
     */
    parseMap(tokens: string[]): TextureMapData {
        // according to wikipedia:
        // (https://en.wikipedia.org/wiki/Wavefront_.obj_file#Vendor_specific_alterations)
        // there is at least one vendor that places the filename before the options
        // rather than after (which is to spec). All options start with a '-'
        // so if the first token doesn't start with a '-', we're going to assume
        // it's the name of the map file.
        let optionTokens: string[];
        let filename = "";
        if (!tokens[0].startsWith("-")) {
            [filename, ...optionTokens] = tokens;
        } else {
            filename = tokens.pop() as string;
            optionTokens = tokens;
        }
        const options = this.parseOptions(optionTokens);
        // normalize Windows-style path separators
        options.filename = filename.replace(/\\/g, "/");
        return options;
    }
    /**
     * Parses the ambient map.
     *
     * @param tokens list of tokens for the map_Ka directive
     */
    parse_map_Ka(tokens: string[]) {
        this.currentMaterial.mapAmbient = this.parseMap(tokens);
    }
    /**
     * Parses the diffuse map.
     *
     * @param tokens list of tokens for the map_Kd directive
     */
    parse_map_Kd(tokens: string[]) {
        this.currentMaterial.mapDiffuse = this.parseMap(tokens);
    }
    /**
     * Parses the specular map.
     *
     * @param tokens list of tokens for the map_Ks directive
     */
    parse_map_Ks(tokens: string[]) {
        this.currentMaterial.mapSpecular = this.parseMap(tokens);
    }
    /**
     * Parses the emissive map.
     *
     * @param tokens list of tokens for the map_Ke directive
     */
    parse_map_Ke(tokens: string[]) {
        this.currentMaterial.mapEmissive = this.parseMap(tokens);
    }
    /**
     * Parses the specular exponent map.
     *
     * @param tokens list of tokens for the map_Ns directive
     */
    parse_map_Ns(tokens: string[]) {
        this.currentMaterial.mapSpecularExponent = this.parseMap(tokens);
    }
    /**
     * Parses the dissolve map.
     *
     * @param tokens list of tokens for the map_d directive
     */
    parse_map_d(tokens: string[]) {
        this.currentMaterial.mapDissolve = this.parseMap(tokens);
    }
    /**
     * Parses the anti-aliasing option.
     *
     * @param tokens list of tokens for the map_aat directive
     */
    parse_map_aat(tokens: string[]) {
        this.currentMaterial.antiAliasing = tokens[0] == "on";
    }
    /**
     * Parses the bump map.
     *
     * @param tokens list of tokens for the map_bump directive
     */
    parse_map_bump(tokens: string[]) {
        this.currentMaterial.mapBump = this.parseMap(tokens);
    }
    /**
     * Parses the bump map (alias of map_bump).
     *
     * @param tokens list of tokens for the bump directive
     */
    parse_bump(tokens: string[]) {
        this.parse_map_bump(tokens);
    }
    /**
     * Parses the displacement map.
     *
     * @param tokens list of tokens for the disp directive
     */
    parse_disp(tokens: string[]) {
        this.currentMaterial.mapDisplacement = this.parseMap(tokens);
    }
    /**
     * Parses the decal map.
     *
     * @param tokens list of tokens for the decal directive
     */
    parse_decal(tokens: string[]) {
        this.currentMaterial.mapDecal = this.parseMap(tokens);
    }
    /**
     * Parses a reflection map; appended because cube reflections emit one
     * refl statement per face.
     *
     * @param tokens list of tokens for the refl directive
     */
    parse_refl(tokens: string[]) {
        this.currentMaterial.mapReflections.push(this.parseMap(tokens));
    }
    /**
     * Parses the MTL file.
     *
     * Iterates line by line parsing each MTL directive.
     *
     * This function expects the first token in the line
     * to be a valid MTL directive. That token is then used
     * to try and run a method on this class. parse_[directive]
     * E.g., the `newmtl` directive would try to call the method
     * parse_newmtl. Each parsing function takes in the remaining
     * list of tokens and updates the currentMaterial class with
     * the attributes provided.
     */
    parse() {
        const lines = this.data.split(/\r?\n/);
        for (let line of lines) {
            line = line.trim();
            if (!line || line.startsWith("#")) {
                continue;
            }
            // Split on runs of whitespace so that multiple spaces or tabs
            // between tokens don't produce empty tokens (parseFloat("") is NaN).
            const [directive, ...tokens] = line.split(/\s+/);
            const parseMethod = (this as any)[`parse_${directive}`];
            if (!parseMethod) {
                console.warn(`Don't know how to parse the directive: "${directive}"`);
                continue;
            }
            parseMethod.bind(this)(tokens);
        }
        // Some cleanup: release the raw MTL text. Assign "" rather than
        // `delete` so the required `data: string` declaration stays valid
        // under strict TypeScript.
        this.data = "";
        this.currentMaterial = SENTINEL_MATERIAL;
    }
    /* eslint-enable camelcase */
}
function emptyTextureOptions(): TextureMapData {
return {
colorCorrection: false,
horizontalBlending: true,
verticalBlending: true,
boostMipMapSharpness: 0,
modifyTextureMap: {
brightness: 0,
contrast: 1,
},
offset: { u: 0, v: 0, w: 0 },
scale: { u: 1, v: 1, w: 1 },
turbulence: { u: 0, v: 0, w: 0 },
clamp: false,
textureResolution: null,
bumpMultiplier: 1,
imfChan: null,
filename: "",
};
} | the_stack |
import { Injectable } from '@angular/core';
import { CameraOptions } from '@ionic-native/camera/ngx';
import { FileEntry } from '@ionic-native/file/ngx';
import { MediaFile, CaptureError, CaptureAudioOptions, CaptureVideoOptions } from '@ionic-native/media-capture/ngx';
import { Subject } from 'rxjs';
import { CoreApp } from '@services/app';
import { CoreFile, CoreFileProvider } from '@services/file';
import { CoreFilepool } from '@services/filepool';
import { CoreSites } from '@services/sites';
import { CoreMimetypeUtils } from '@services/utils/mimetype';
import { CoreTextUtils } from '@services/utils/text';
import { CoreTimeUtils } from '@services/utils/time';
import { CoreUtils } from '@services/utils/utils';
import { CoreWSFile, CoreWSFileUploadOptions, CoreWSUploadFileResult } from '@services/ws';
import { makeSingleton, Translate, MediaCapture, ModalController, Camera } from '@singletons';
import { CoreLogger } from '@singletons/logger';
import { CoreEmulatorCaptureMediaComponent } from '@features/emulator/components/capture-media/capture-media';
import { CoreError } from '@classes/errors/error';
import { CoreSite } from '@classes/site';
import { CoreFileEntry, CoreFileHelper } from '@services/file-helper';
/**
 * File upload options.
 * Extends the WS upload options with app-side behavior flags.
 */
export interface CoreFileUploaderOptions extends CoreWSFileUploadOptions {
    /**
     * Whether the file should be deleted after the upload (if success).
     */
    deleteAfterUpload?: boolean;
}
/**
* Service to upload files.
*/
@Injectable({ providedIn: 'root' })
export class CoreFileUploaderProvider {
    static readonly LIMITED_SIZE_WARNING = 1048576; // 1 MB.
    static readonly WIFI_SIZE_WARNING = 10485760; // 10 MB.
    protected logger: CoreLogger;
    // Observers to notify when a media file starts/stops being recorded/selected.
    onGetPicture: Subject<boolean> = new Subject<boolean>(); // Picture selection.
    onAudioCapture: Subject<boolean> = new Subject<boolean>(); // True while audio is being captured.
    onVideoCapture: Subject<boolean> = new Subject<boolean>(); // True while video is being captured.
    /**
     * Initializes the provider's logger.
     */
    constructor() {
        this.logger = CoreLogger.getInstance('CoreFileUploaderProvider');
    }
/**
* Add a dot to the beginning of an extension.
*
* @param extension Extension.
* @return Treated extension.
*/
protected addDot(extension: string): string {
return '.' + extension;
}
/**
* Compares two file lists and returns if they are different.
*
* @param a First file list.
* @param b Second file list.
* @return Whether both lists are different.
*/
areFileListDifferent(a: CoreFileEntry[], b: CoreFileEntry[]): boolean {
a = a || [];
b = b || [];
if (a.length != b.length) {
return true;
}
// Currently we are going to compare the order of the files as well.
// This function can be improved comparing more fields or not comparing the order.
for (let i = 0; i < a.length; i++) {
if (CoreFile.getFileName(a[i]) != CoreFile.getFileName(b[i])) {
return true;
}
}
return false;
}
/**
* Check if a certain site allows deleting draft files.
*
* @param siteId Site Id. If not defined, use current site.
* @return Promise resolved with true if can delete.
* @since 3.10
*/
async canDeleteDraftFiles(siteId?: string): Promise<boolean> {
try {
const site = await CoreSites.getSite(siteId);
return this.canDeleteDraftFilesInSite(site);
} catch (error) {
return false;
}
}
/**
* Check if a certain site allows deleting draft files.
*
* @param site Site. If not defined, use current site.
* @return Whether draft files can be deleted.
* @since 3.10
*/
canDeleteDraftFilesInSite(site?: CoreSite): boolean {
site = site || CoreSites.getCurrentSite();
return !!(site?.wsAvailable('core_files_delete_draft_files'));
}
    /**
     * Start the audio recorder application and return information about captured audio clip files.
     *
     * @param options Options.
     * @return Promise resolved with the result.
     */
    async captureAudio(options: CaptureAudioOptions): Promise<MediaFile[] | CaptureError> {
        // Notify observers that an audio capture has started.
        this.onAudioCapture.next(true);
        try {
            return await MediaCapture.captureAudio(options);
        } finally {
            // Always notify the capture ended, whether it succeeded or failed.
            this.onAudioCapture.next(false);
        }
    }
/**
* Record an audio file without using an external app.
*
* @return Promise resolved with the file.
*/
async captureAudioInApp(): Promise<MediaFile> {
const params = {
type: 'audio',
};
const modal = await ModalController.create({
component: CoreEmulatorCaptureMediaComponent,
cssClass: 'core-modal-fullscreen',
componentProps: params,
backdropDismiss: false,
});
await modal.present();
const result = await modal.onWillDismiss();
if (result.role == 'success') {
return result.data[0];
} else {
throw result.data;
}
}
    /**
     * Start the video recorder application and return information about captured video clip files.
     *
     * @param options Options.
     * @return Promise resolved with the result.
     */
    async captureVideo(options: CaptureVideoOptions): Promise<MediaFile[] | CaptureError> {
        // Notify observers that a video capture has started.
        this.onVideoCapture.next(true);
        try {
            return await MediaCapture.captureVideo(options);
        } finally {
            // Always notify the capture ended, whether it succeeded or failed.
            this.onVideoCapture.next(false);
        }
    }
/**
* Clear temporary attachments to be uploaded.
* Attachments already saved in an offline store will NOT be deleted, only files in tmp folder will be deleted.
*
* @param files List of files.
*/
clearTmpFiles(files: (CoreWSFile | FileEntry)[]): void {
// Delete the temporary files.
files.forEach((file) => {
if ('remove' in file && CoreFile.removeBasePath(file.toURL()).startsWith(CoreFileProvider.TMPFOLDER)) {
// Pass an empty function to prevent missing parameter error.
file.remove(() => {
// Nothing to do.
});
}
});
}
/**
* Delete draft files.
*
* @param draftId Draft ID.
* @param files Files to delete.
* @param siteId Site ID. If not defined, current site.
* @return Promise resolved when done.
*/
async deleteDraftFiles(draftId: number, files: { filepath: string; filename: string }[], siteId?: string): Promise<void> {
const site = await CoreSites.getSite(siteId);
const params = {
draftitemid: draftId,
files: files,
};
return site.write('core_files_delete_draft_files', params);
}
/**
 * Get the upload options for a file taken with the Camera Cordova plugin.
 *
 * @param uri File URI.
 * @param isFromAlbum True if the image was taken from album, false if it's a new image taken with camera.
 * @return Options.
 */
getCameraUploadOptions(uri: string, isFromAlbum?: boolean): CoreFileUploaderOptions {
    const extension = CoreMimetypeUtils.guessExtensionFromUrl(uri);
    const mimetype = CoreMimetypeUtils.getMimeType(extension);
    let fileName = CoreFile.getFileAndDirectoryFromPath(uri).name;

    if (CoreApp.isIOS() && (mimetype == 'image/jpeg' || mimetype == 'image/png')) {
        // In iOS, the pictures can have repeated names, even if they come from the album.
        // Append a timestamp to the base name to make it unique.
        const pieces = fileName.split('.');
        pieces[0] += '_' + CoreTimeUtils.readableTimestamp();
        fileName = pieces.join('.');
    }

    let deleteAfterUpload = !isFromAlbum;
    if (isFromAlbum) {
        // If the file was picked from the album, delete it only if it was copied to the app's folder.
        deleteAfterUpload = CoreFile.isFileInAppFolder(uri);

        if (CoreApp.isAndroid()) {
            // Picking an image from album in Android adds a timestamp at the end of the file. Delete it.
            fileName = fileName.replace(/(\.[^.]*)\?[^.]*$/, '$1');
        }
    }

    return {
        deleteAfterUpload,
        mimeType: mimetype,
        fileName,
    };
}
/**
 * Given a list of original files and a list of current files, return the list of files to delete.
 *
 * @param originalFiles Original files.
 * @param currentFiles Current files.
 * @return List of files to delete.
 */
getFilesToDelete(
    originalFiles: CoreWSFile[],
    currentFiles: CoreFileEntry[],
): { filepath: string; filename: string }[] {
    const current = currentFiles || [];

    // A file must be deleted when it no longer appears in the current list (compared by URL).
    return originalFiles
        .filter((file) => !current.some((currentFile) =>
            CoreFileHelper.getFileUrl(<CoreWSFile> currentFile) == CoreFileHelper.getFileUrl(file)))
        .map((file) => ({
            filepath: file.filepath!,
            filename: file.filename!,
        }));
}
/**
 * Get the upload options for a file of any type.
 *
 * @param uri File URI.
 * @param name File name.
 * @param mimetype File mimetype. If not provided it's guessed from the file name extension.
 * @param deleteAfterUpload Whether the file should be deleted after upload.
 * @param fileArea File area to upload the file to. It defaults to 'draft'.
 * @param itemId Draft ID to upload the file to, 0 to create new.
 * @return Options.
 */
getFileUploadOptions(
    uri: string,
    name: string,
    mimetype?: string,
    deleteAfterUpload?: boolean,
    fileArea?: string,
    itemId?: number,
): CoreFileUploaderOptions {
    return {
        fileName: name,
        mimeType: mimetype || CoreMimetypeUtils.getMimeType(CoreMimetypeUtils.getFileExtension(name)),
        deleteAfterUpload: !!deleteAfterUpload,
        itemId: itemId || 0,
        fileArea: fileArea,
    };
}
/**
 * Get the upload options for a file taken with the media capture Cordova plugin.
 *
 * @param mediaFile File object to upload.
 * @return Options.
 */
getMediaUploadOptions(mediaFile: MediaFile): CoreFileUploaderOptions {
    let fileName = mediaFile.name;

    // Make the name unique by appending a timestamp, unless it already carries one.
    if (!fileName.match(/_\d{14}(\..*)?$/)) {
        const pieces = fileName.split('.');
        pieces[0] += '_' + CoreTimeUtils.readableTimestamp();
        fileName = pieces.join('.');
    }

    return {
        fileName,
        deleteAfterUpload: true,
        // Prefer the mimetype reported by the plugin; fall back to guessing from the extension.
        mimeType: mediaFile.type
            ? mediaFile.type
            : CoreMimetypeUtils.getMimeType(CoreMimetypeUtils.getFileExtension(fileName)),
    };
}
/**
* Take a picture or video, or load one from the library.
*
* @param options Options.
* @return Promise resolved with the result.
*/
getPicture(options: CameraOptions): Promise<string> {
this.onGetPicture.next(true);
return Camera.getPicture(options).finally(() => {
this.onGetPicture.next(false);
});
}
/**
 * Get the files stored in a folder, marking them as offline.
 *
 * @param folderPath Folder where to get the files.
 * @return Promise resolved with the list of files.
 */
async getStoredFiles(folderPath: string): Promise<FileEntry[]> {
    const contents = await CoreFile.getDirectoryContents(folderPath);

    return contents as FileEntry[];
}
/**
* Get stored files from combined online and offline file object.
*
* @param filesObject The combined offline and online files object.
* @param folderPath Folder path to get files from.
* @return Promise resolved with files.
*/
async getStoredFilesFromOfflineFilesObject(
filesObject: CoreFileUploaderStoreFilesResult,
folderPath: string,
): Promise<CoreFileEntry[]> {
let files: CoreFileEntry[] = [];
if (filesObject.online.length > 0) {
files = CoreUtils.clone(filesObject.online);
}
if (filesObject.offline > 0) {
const offlineFiles = await CoreUtils.ignoreErrors(this.getStoredFiles(folderPath));
if (offlineFiles) {
files = files.concat(offlineFiles);
}
}
return files;
}
/**
 * Check if a file's mimetype is invalid based on the list of accepted mimetypes. This function needs either the file's
 * mimetype or the file's path/name.
 *
 * @param mimetypes List of supported mimetypes. If undefined, all mimetypes supported.
 * @param path File's path or name.
 * @param mimetype File's mimetype.
 * @return Undefined if file is valid, error message if file is invalid.
 */
isInvalidMimetype(mimetypes?: string[], path?: string, mimetype?: string): string | undefined {
    if (!mimetypes) {
        // No restrictions: everything is valid.
        return;
    }

    let extension: string | undefined;

    // Verify that the mimetype of the file is supported.
    if (mimetype) {
        extension = CoreMimetypeUtils.getExtension(mimetype);

        if (!mimetypes.includes(mimetype)) {
            // Get the "main" mimetype of the extension.
            // It's possible that the list of accepted mimetypes only includes the "main" mimetypes.
            mimetype = CoreMimetypeUtils.getMimeType(extension);
        }
    } else if (path) {
        extension = CoreMimetypeUtils.getFileExtension(path);
        mimetype = CoreMimetypeUtils.getMimeType(extension);
    } else {
        throw new CoreError('No mimetype or path supplied.');
    }

    if (mimetype && !mimetypes.includes(mimetype)) {
        extension = extension || Translate.instant('core.unknown');

        return Translate.instant('core.fileuploader.invalidfiletype', { $a: extension });
    }
}
/**
 * Mark files as offline.
 *
 * @param files Files to mark as offline.
 * @return Files marked as offline.
 * @deprecated since 3.9.5. Now stored files no longer have an offline property.
 */
markOfflineFiles(files: FileEntry[]): FileEntry[] {
    // Kept only for backwards compatibility: the offline flag no longer exists,
    // so the input list is returned untouched.
    return files;
}
/**
 * Parse filetypeList to get the list of allowed mimetypes and the data to render information.
 *
 * Each entry of the list can be a mimetype ("image/png"), an extension (".png") or a group name ("image").
 *
 * @param filetypeList Formatted string list where the mimetypes can be checked.
 * @return Mimetypes and the filetypes informations. Undefined if all types supported.
 */
prepareFiletypeList(filetypeList: string): CoreFileUploaderTypeList | undefined {
    filetypeList = filetypeList?.trim();

    if (!filetypeList || filetypeList == '*') {
        // All types supported, return undefined.
        return;
    }

    // Entries can be separated by semicolons, commas or spaces.
    const filetypes = filetypeList.split(/[;, ]+/g);
    const mimetypes: Record<string, boolean> = {}; // Use an object to prevent duplicates.
    const typesInfo: CoreFileUploaderTypeListInfoEntry[] = [];

    filetypes.forEach((filetype) => {
        filetype = filetype.trim();

        if (!filetype) {
            return;
        }

        if (filetype.indexOf('/') != -1) {
            // It's a mimetype.
            typesInfo.push({
                name: CoreMimetypeUtils.getMimetypeDescription(filetype),
                // NOTE(review): this.addDot is passed unbound to map(); assumes addDot doesn't use `this` — confirm.
                extlist: CoreMimetypeUtils.getExtensions(filetype).map(this.addDot).join(' '),
            });

            mimetypes[filetype] = true;
        } else if (filetype.indexOf('.') === 0) {
            // It's an extension.
            const mimetype = CoreMimetypeUtils.getMimeType(filetype);
            typesInfo.push({
                name: mimetype && CoreMimetypeUtils.getMimetypeDescription(mimetype),
                extlist: filetype,
            });

            if (mimetype) {
                mimetypes[mimetype] = true;
            }
        } else {
            // It's a group.
            const groupExtensions = CoreMimetypeUtils.getGroupMimeInfo(filetype, 'extensions');
            const groupMimetypes = CoreMimetypeUtils.getGroupMimeInfo(filetype, 'mimetypes');

            if (groupExtensions && groupExtensions.length > 0) {
                typesInfo.push({
                    name: CoreMimetypeUtils.getTranslatedGroupName(filetype),
                    extlist: groupExtensions.map(this.addDot).join(' '),
                });

                groupMimetypes?.forEach((mimetype) => {
                    if (mimetype) {
                        mimetypes[mimetype] = true;
                    }
                });
            } else {
                // Group has no extensions: treat the entry as an extension without the leading dot.
                filetype = this.addDot(filetype);

                const mimetype = CoreMimetypeUtils.getMimeType(filetype);
                typesInfo.push({
                    name: mimetype && CoreMimetypeUtils.getMimetypeDescription(mimetype),
                    extlist: filetype,
                });

                if (mimetype) {
                    mimetypes[mimetype] = true;
                }
            }
        }
    });

    return {
        info: typesInfo,
        mimetypes: Object.keys(mimetypes),
    };
}
/**
 * Given a list of files (either online files or local files), store the local files in a local folder
 * to be uploaded later.
 *
 * @param folderPath Path of the folder where to store the files.
 * @param files List of files.
 * @return Promise resolved with the online files and the number of files stored offline.
 */
async storeFilesToUpload(
    folderPath: string,
    files: CoreFileEntry[],
): Promise<CoreFileUploaderStoreFilesResult> {
    const result: CoreFileUploaderStoreFilesResult = {
        online: [],
        offline: 0,
    };

    if (!files || !files.length) {
        return result;
    }

    // Remove unused files from previous saves.
    await CoreFile.removeUnusedFiles(folderPath, files);

    await Promise.all(files.map(async (file) => {
        if (!CoreUtils.isFileEntry(file)) {
            // It's an online file, add it to the result and ignore it.
            result.online.push({
                filename: file.filename,
                fileurl: CoreFileHelper.getFileUrl(file),
            });
        } else if (file.fullPath && file.fullPath.indexOf(folderPath) != -1) {
            // File already in the submission folder.
            // Bug fix: this check previously used `file.fullPath?.indexOf(folderPath) != -1`, so a file with an
            // undefined fullPath matched this branch (undefined != -1 is true) and was counted as stored without
            // ever being copied. Such files now fall through to the copy branch below.
            result.offline++;
        } else {
            // Local file, copy it.
            // Use copy instead of move to prevent having a unstable state if some copies succeed and others don't.
            const destFile = CoreTextUtils.concatenatePaths(folderPath, file.name);
            result.offline++;

            await CoreFile.copyFile(file.toURL(), destFile);
        }
    }));

    return result;
}
/**
 * Upload a file.
 *
 * @param uri File URI.
 * @param options Options for the upload.
 * @param onProgress Function to call on progress.
 * @param siteId Id of the site to upload the file to. If not defined, use current site.
 * @return Promise resolved when done.
 */
async uploadFile(
    uri: string,
    options?: CoreFileUploaderOptions,
    onProgress?: (event: ProgressEvent) => void,
    siteId?: string,
): Promise<CoreWSUploadFileResult> {
    const uploadOptions = options || {};
    const shouldDelete = !!uploadOptions.deleteAfterUpload;

    // Clone the options and strip the app-only flag before passing them to the file transfer.
    const ftOptions = CoreUtils.clone(uploadOptions);
    delete ftOptions.deleteAfterUpload;

    const site = await CoreSites.getSite(siteId);
    const result = await site.uploadFile(uri, ftOptions, onProgress);

    if (shouldDelete) {
        CoreFile.removeExternalFile(uri);
    }

    return result;
}
/**
 * Given a list of files (either online files or local files), upload the local files to the draft area.
 * Local files are not deleted from the device after upload.
 *
 * @param itemId Draft ID.
 * @param files List of files.
 * @param siteId Site ID. If not defined, current site.
 * @return Promise resolved with the itemId.
 */
async uploadFiles(itemId: number, files: CoreFileEntry[], siteId?: string): Promise<void> {
    siteId = siteId || CoreSites.getCurrentSiteId();

    if (!files || !files.length) {
        return;
    }

    // Index the online files by name and collect the local files to upload.
    const usedNames: {[name: string]: CoreFileEntry} = {};
    const filesToUpload: FileEntry[] = [];

    for (const file of files) {
        if (CoreUtils.isFileEntry(file)) {
            filesToUpload.push(<FileEntry> file);
        } else {
            // It's an online file.
            usedNames[file.filename!.toLowerCase()] = file;
        }
    }

    const uploads = filesToUpload.map(async (file) => {
        // Make sure the file name is unique in the area.
        const name = CoreFile.calculateUniqueName(usedNames, file.name);
        usedNames[name] = file;

        // Now upload the file.
        const options = this.getFileUploadOptions(file.toURL(), name, undefined, false, 'draft', itemId);

        await this.uploadFile(file.toURL(), options, undefined, siteId);
    });

    await Promise.all(uploads);
}
/**
* Upload a file to a draft area and return the draft ID.
*
* If the file is an online file it will be downloaded and then re-uploaded.
* If the file is a local file it will not be deleted from the device after upload.
*
* @param file Online file or local FileEntry.
* @param itemId Draft ID to use. Undefined or 0 to create a new draft ID.
* @param component The component to set to the downloaded files.
* @param componentId An ID to use in conjunction with the component.
* @param siteId Site ID. If not defined, current site.
* @return Promise resolved with the itemId.
*/
async uploadOrReuploadFile(
file: CoreFileEntry,
itemId?: number,
component?: string,
componentId?: string | number,
siteId?: string,
): Promise<number> {
siteId = siteId || CoreSites.getCurrentSiteId();
let fileName: string | undefined;
let fileEntry: FileEntry | undefined;
const isOnline = !CoreUtils.isFileEntry(file);
if (CoreUtils.isFileEntry(file)) {
// Local file, we already have the file entry.
fileName = file.name;
fileEntry = file;
} else {
// It's an online file. We need to download it and re-upload it.
fileName = file.filename;
const path = await CoreFilepool.downloadUrl(
siteId,
CoreFileHelper.getFileUrl(file),
false,
component,
componentId,
file.timemodified,
undefined,
undefined,
file,
);
fileEntry = await CoreFile.getExternalFile(path);
}
// Now upload the file.
const extension = CoreMimetypeUtils.getFileExtension(fileName!);
const mimetype = extension ? CoreMimetypeUtils.getMimeType(extension) : undefined;
const options = this.getFileUploadOptions(fileEntry.toURL(), fileName!, mimetype, isOnline, 'draft', itemId);
const result = await this.uploadFile(fileEntry.toURL(), options, undefined, siteId);
return result.itemid;
}
/**
 * Given a list of files (either online files or local files), upload them to a draft area and return the draft ID.
 *
 * Online files will be downloaded and then re-uploaded.
 * Local files are not deleted from the device after upload.
 * If there are no files to upload it will return a fake draft ID (1).
 *
 * @param files List of files.
 * @param component The component to set to the downloaded files.
 * @param componentId An ID to use in conjunction with the component.
 * @param siteId Site ID. If not defined, current site.
 * @return Promise resolved with the itemId.
 */
async uploadOrReuploadFiles(
    files: CoreFileEntry[],
    component?: string,
    componentId?: string | number,
    siteId?: string,
): Promise<number> {
    siteId = siteId || CoreSites.getCurrentSiteId();

    if (!files || !files.length) {
        // Return fake draft ID.
        return 1;
    }

    // Upload only the first file first to get a draft id.
    const itemId = await this.uploadOrReuploadFile(files[0], 0, component, componentId, siteId);

    // Upload the rest of the files in parallel into the same draft area.
    await Promise.all(files.slice(1).map((file) =>
        this.uploadOrReuploadFile(file, itemId, component, componentId, siteId)));

    return itemId;
}
}
// App-wide singleton instance of the provider.
export const CoreFileUploader = makeSingleton(CoreFileUploaderProvider);
/**
 * Result of storing files to upload later: online files kept as-is plus a count of stored local copies.
 */
export type CoreFileUploaderStoreFilesResult = {
    online: CoreWSFile[]; // List of online files.
    offline: number; // Number of offline files.
};
/**
 * Parsed file type list: accepted mimetypes plus display information for each accepted type.
 */
export type CoreFileUploaderTypeList = {
    info: CoreFileUploaderTypeListInfoEntry[]; // Display info for each accepted type.
    mimetypes: string[]; // Flat list of accepted mimetypes (no duplicates).
};
/**
 * Display information for one accepted file type.
 */
export type CoreFileUploaderTypeListInfoEntry = {
    name?: string; // Human readable type name; undefined when no description is available.
    extlist: string; // Space-separated list of extensions.
};
import ARToolKit from "./ARToolKit";
/**
* The ARController is the main object for doing AR marker detection with JSARToolKit.
*
* To use an ARController, you need to tell it the dimensions to use for the AR processing canvas and
* pass it an ARCameraParam to define the camera parameters to use when processing images.
* The ARCameraParam defines the lens distortion and aspect ratio of the camera used.
* See https://www.artoolworks.com/support/library/Calibrating_your_camera for
 * more information about AR camera parameters and how to make and use them.
*
* If you pass an image as the first argument, the ARController uses that as the image to process,
* using the dimensions of the image as AR processing canvas width and height. If the first argument
* to ARController is an image, the second argument is used as the camera param.
*
* The camera parameters argument can be either an ARCameraParam or an URL to a camera definition file.
* If the camera argument is an URL, it is loaded into a new ARCameraParam, and the ARController dispatches
* a "load" event and calls the onload method if it is defined.
*
* @exports ARController
* @constructor
* @param {number} width The width of the images to process.
* @param {number} height The height of the images to process.
* @param {ARCameraParam | string} camera The ARCameraParam to use for image processing.
* If this is a string, the ARController treats it as an URL and tries to load it as a
* ARCameraParam definition file, calling ARController#onload on success.
*/
export class ARToolKitController {
  // 2D context of the processing canvas; the constructor throws if it can't be obtained.
  public ctx: CanvasRenderingContext2D | null;
  // Canvas used as the AR processing surface (also appended to the DOM by debugSetup()).
  public canvas: HTMLCanvasElement;
  // Dimensions of the source video/image being processed.
  public videoWidth: any;
  public videoHeight: any;
  // Either "landscape" or "portrait"; defaults to "landscape" in the constructor.
  public orientation: string;
  // Emscripten heap pointer for the current frame — presumably set by _initialize(); confirm outside this chunk.
  private framepointer: any;
  // Controller id used in all ARToolKit.* calls; -1 until initialization assigns one.
  private id: number;
  // Map of event name -> array of callbacks (see addEventListener/dispatchEvent).
  private listeners: any;
  // Source image/video element when the constructor is called with an element instead of a width.
  private image: any;
  // Tracking state per pattern marker id (inPrevious/inCurrent/matrix/markerWidth).
  private patternMarkers: any;
  // Tracking state per barcode (matrix) marker id, same shape as patternMarkers entries.
  private barcodeMarkers: any;
  // 16-element column-major 4x4 transform reused for every dispatched event.
  private transformMat: any;
  // Width assigned to tracked markers when no explicit width is given.
  private defaultMarkerWidth: number;
  // Camera parameters (ARCameraParam instance or URL) passed to the constructor.
  private cameraParam: any;
  // 3x4 marker transform shared with the Emscripten side; read by the getTransMat* methods.
  private markerTransformMat: any;
  // Pointer to the debug (binarized) processing image; truthy only after debugSetup().
  private _bwpointer: any;
  // The following fields appear to be used by methods outside this chunk — verify there.
  private framesize: any;
  private dataHeap: any;
  private cameraMat: any;
  private onload: any;
  private contextError = "Canvas 2D Context was not available";
  /**
   * Builds the controller, creating the processing canvas and kicking off initialization.
   *
   * Overloaded argument handling: if `width` is not a number it is treated as an image/video
   * element (its dimensions are used) and `height` is then treated as the camera parameter.
   */
  constructor(width: any, height: any, camera: any) {
    let w = width;
    let h = height;
    this.orientation = "landscape";
    this.listeners = {};
    // TODO: What is going on will all these types?
    // Element-first overload: take the processing size from the element itself.
    if (typeof width !== "number") {
      const image = width;
      camera = height;
      w = image.videoWidth || image.width;
      h = image.videoHeight || image.height;
      this.image = image;
    }
    this.defaultMarkerWidth = 1;
    this.patternMarkers = {};
    this.barcodeMarkers = {};
    // Column-major 4x4 transform reused for every dispatched marker event.
    this.transformMat = new Float32Array(16);
    this.canvas = document.createElement("canvas");
    this.canvas.width = w;
    this.canvas.height = h;
    this.ctx = this.canvas.getContext("2d");
    if (this.ctx === null) {
      throw Error("Could not get 2D Context for canvas element");
    }
    this.videoWidth = w;
    this.videoHeight = h;
    this.id = -1; // TODO: Quick solution to keep TSC happy
    this.cameraParam = camera;
    // _initialize is defined outside this chunk; presumably it assigns this.id and loads the camera param.
    this._initialize();
  }
  /**
   * Destroys the ARController instance and frees all associated resources.
   * After calling dispose, the ARController can't be used any longer. Make a new one if you need one.
   * Calling this avoids leaking Emscripten memory, which may be important if you're
   * using multiple ARControllers.
   */
  public dispose() {
    // Release the Emscripten-side resources for this controller first.
    ARToolKit.teardown(this.id);
    // Then drop every own property so the instance can't be (mis)used afterwards.
    for (const key in this) {
      if (key) { // for-in keys are non-empty strings, so this is always true; kept as-is.
        delete this[key];
      }
    }
  }
  /**
   * Detects markers in the given image. The process method dispatches marker detection events during its run.
   *
   * The marker detection process proceeds by first dispatching a markerNum event that tells you how many
   * markers were found in the image. Next, a getMarker event is dispatched for each found marker square.
   * Finally, getMultiMarker is dispatched for every found multimarker
   * followed by getMultiMarkerSub events
   * dispatched for each of the markers in the multimarker.
   * arController.addEventListener("markerNum", function(ev) {
   *   console.log("Detected " + ev.data + " markers.")
   * });
   * arController.addEventListener("getMarker", function(ev) {
   *   console.log("Detected marker with ids:", ev.data.marker.id, ev.data.marker.idPatt, ev.data.marker.idMatrix);
   *   console.log("Marker data", ev.data.marker);
   *   console.log("Marker transform matrix:", [].join.call(ev.data.matrix, ", "));
   * });
   * arController.addEventListener("getMultiMarker", function(ev) {
   *   console.log("Detected multimarker with id:", ev.data.multiMarkerId);
   * });
   * arController.addEventListener("getMultiMarkerSub", function(ev) {
   *   console.log("Submarker for " + ev.data.multiMarkerId, ev.data.markerIndex, ev.data.marker);
   * });
   * arController.process(image);
   * If no image is given, defaults to this.image.
   * If the debugSetup has been called, draws debug markers on the debug canvas.
   * @param {HTMLImageElement|HTMLVideoElement} [image] The image to process [optional].
   */
  public process(image: HTMLImageElement | HTMLVideoElement) {
    this.detectMarker(image);
    const markerNum = this.getMarkerNum();
    // Age the tracking state: what was "current" last frame becomes "previous".
    for (const k in this.patternMarkers) {
      if (k !== undefined) { // for-in keys are strings, so always true; kept as-is.
        const o = this.patternMarkers[k];
        o.inPrevious = o.inCurrent;
        o.inCurrent = false;
      }
    }
    for (const k in this.barcodeMarkers) {
      if (k !== undefined) {
        const o = this.barcodeMarkers[k];
        o.inPrevious = o.inCurrent;
        o.inCurrent = false;
      }
    }
    let visible;
    let multiEachMarkerInfo;
    // Single-marker pass: classify each detection as pattern or barcode and dispatch getMarker.
    for (let i = 0; i < markerNum; i++) {
      const markerInfo = this.getMarker(i);
      let markerType = ARToolKit.UNKNOWN_MARKER;
      // Default tracking slot (-1) used when the marker matches neither classification below.
      visible = this.trackPatternMarkerId(-1);
      if (
        markerInfo.idPatt > -1 &&
        (markerInfo.id === markerInfo.idPatt || markerInfo.idMatrix === -1)
      ) {
        visible = this.trackPatternMarkerId(markerInfo.idPatt);
        markerType = ARToolKit.PATTERN_MARKER;
        if (markerInfo.dir !== markerInfo.dirPatt) {
          this.setMarkerInfoDir(i, markerInfo.dirPatt);
        }
      } else if (markerInfo.idMatrix > -1) {
        visible = this.trackBarcodeMarkerId(markerInfo.idMatrix);
        markerType = ARToolKit.BARCODE_MARKER;
        if (markerInfo.dir !== markerInfo.dirMatrix) {
          this.setMarkerInfoDir(i, markerInfo.dirMatrix);
        }
      }
      // If the marker was also visible last frame, use the continuous estimator for stability.
      if (markerType !== ARToolKit.UNKNOWN_MARKER && visible.inPrevious) {
        this.getTransMatSquareCont(
          i,
          visible.markerWidth,
          visible.matrix,
          visible.matrix
        );
      } else {
        this.getTransMatSquare(i, visible.markerWidth, visible.matrix);
      }
      visible.inCurrent = true;
      this.transMatToGLMat(visible.matrix, this.transformMat);
      // NOTE: this.transformMat is reused, so listeners must copy it if they keep a reference.
      this.dispatchEvent({
        name: "getMarker",
        target: this,
        data: {
          index: i,
          type: markerType,
          marker: markerInfo,
          matrix: this.transformMat,
        },
      });
    }
    // Multimarker pass: dispatch getMultiMarker once per visible multimarker, then one
    // getMultiMarkerSub per submarker.
    const multiMarkerCount = this.getMultiMarkerCount();
    for (let i = 0; i < multiMarkerCount; i++) {
      const subMarkerCount = this.getMultiMarkerPatternCount(i);
      visible = false;
      ARToolKit.getTransMatMultiSquareRobust(this.id, i);
      this.transMatToGLMat(this.markerTransformMat, this.transformMat);
      for (let j = 0; j < subMarkerCount; j++) {
        multiEachMarkerInfo = this.getMultiEachMarker(i, j);
        // A single visible submarker is enough to consider the whole multimarker visible.
        if (multiEachMarkerInfo.visible >= 0) {
          visible = true;
          this.dispatchEvent({
            name: "getMultiMarker",
            target: this,
            data: {
              multiMarkerId: i,
              matrix: this.transformMat,
            },
          });
          break;
        }
      }
      if (visible) {
        for (let j = 0; j < subMarkerCount; j++) {
          multiEachMarkerInfo = this.getMultiEachMarker(i, j);
          this.transMatToGLMat(this.markerTransformMat, this.transformMat);
          this.dispatchEvent({
            name: "getMultiMarkerSub",
            target: this,
            data: {
              multiMarkerId: i,
              markerIndex: j,
              marker: multiEachMarkerInfo,
              matrix: this.transformMat,
            },
          });
        }
      }
    }
    // Only draw debug overlays when debugSetup() has been called.
    if (this._bwpointer) {
      this.debugDraw();
    }
  }
/**
* Adds the given pattern marker ID to the index of tracked IDs.
* Sets the markerWidth for the pattern marker to markerWidth.
* Used by process() to implement continuous tracking,
* keeping track of the marker's transformation matrix
* and customizable marker widths.
* @param {number} id ID of the pattern marker to track.
* @param {number} [markerWidth] The width of the marker to track.
* @return {Object} The marker tracking object.
*/
public trackPatternMarkerId(id: number, markerWidth?: number) {
let obj = this.patternMarkers[id];
if (!obj) {
this.patternMarkers[id] = obj = {
inPrevious: false,
inCurrent: false,
matrix: new Float32Array(12),
markerWidth: markerWidth || this.defaultMarkerWidth,
};
}
if (markerWidth) {
obj.markerWidth = markerWidth;
}
return obj;
}
/**
* Adds the given barcode marker ID to the index of tracked IDs.
* Sets the markerWidth for the pattern marker to markerWidth.
* Used by process() to implement continuous tracking,
* keeping track of the marker"s transformation matrix
* and customizable marker widths.
* @param {number} id ID of the barcode marker to track.
* @param {number} [markerWidth] The width of the marker to track.
* @return {Object} The marker tracking object.
*/
public trackBarcodeMarkerId(id: number, markerWidth?: number) {
let obj = this.barcodeMarkers[id];
if (!obj) {
this.barcodeMarkers[id] = obj = {
inPrevious: false,
inCurrent: false,
matrix: new Float32Array(12),
markerWidth: markerWidth || this.defaultMarkerWidth,
};
}
if (markerWidth) {
obj.markerWidth = markerWidth;
}
return obj;
}
  /**
   * Returns the number of multimarkers registered on this ARController.
   * @return {number} Number of multimarkers registered.
   */
  public getMultiMarkerCount() {
    // Thin delegation to the Emscripten module, scoped to this controller's id.
    return ARToolKit.getMultiMarkerCount(this.id);
  }
  /**
   * Returns the number of markers in the multimarker registered for the given multiMarkerId.
   * @param {number} multiMarkerId The id number of the multimarker to access. Given by loadMultiMarker.
   * @return {number} Number of markers in the multimarker. Negative value indicates failure to find the multimarker.
   */
  public getMultiMarkerPatternCount(multiMarkerId: number) {
    // Thin delegation to the Emscripten module.
    return ARToolKit.getMultiMarkerNum(this.id, multiMarkerId);
  }
/**
* Add an event listener on this ARController for the named event, calling the callback function
* whenever that event is dispatched.
*
* Possible events are:
* - getMarker - dispatched whenever process() finds a square marker
* - getMultiMarker - dispatched whenever process() finds a visible registered multimarker
* - getMultiMarkerSub - dispatched by process() for each marker in a visible multimarker
* - load - dispatched when the ARController is ready to use (useful if passing in a camera URL in the constructor)
* @param {string} name Name of the event to listen to.
* @param {function} callback Callback function to call when an event with the given name is dispatched.
*/
public addEventListener(name: string, callback: (event: any) => any) {
if (!this.listeners[name]) {
this.listeners[name] = [];
}
this.listeners[name].push(callback);
}
/**
* Remove an event listener from the named event.
* @param {string} name Name of the event to stop listening to.
* @param {function} callback Callback function to remove from the listeners of the named event.
*/
public removeEventListener(name: string, callback: () => any) {
if (this.listeners[name]) {
const index = this.listeners[name].indexOf(callback);
if (index > -1) {
this.listeners[name].splice(index, 1);
}
}
}
/**
* Dispatches the given event to all registered listeners on event.name.
* @param {Object} event Event to dispatch.
*/
public dispatchEvent(event: any) {
const listeners = this.listeners[event.name];
if (listeners) {
for (let i = 0; i < listeners.length; i++) {
listeners[i].call(this, event);
}
}
}
  /**
   * Sets up a debug canvas for the AR detection. Draws a red marker on top of each detected square in the image.
   * The debug canvas is added to document.body.
   */
  public debugSetup() {
    document.body.appendChild(this.canvas);
    // setDebugMode/getProcessingImage are defined outside this chunk;
    // _bwpointer being truthy is what makes process() call debugDraw().
    this.setDebugMode(1);
    this._bwpointer = this.getProcessingImage();
  }
  /**
   * Loads a pattern marker from the given URL and calls the onSuccess callback with the UID of the marker.
   * arController.loadMarker(markerURL, onSuccess, onError);
   * @param {string} markerURL - The URL of the marker pattern file to load.
   * @param {function} onSuccess - The success callback. Called with the id of the loaded marker on a successful load.
   * @param {function} onError - The error callback. Called with the encountered error if the load fails.
   */
  public loadMarker(
    markerURL: string,
    onSuccess: (id: number) => any,
    onError: (err: any) => any
  ) {
    // Asynchronous: the result is only delivered through the callbacks.
    return ARToolKit.addMarker(this.id, markerURL, onSuccess, onError);
  }
  /**
   * Loads a multimarker from the given URL and calls the onSuccess callback with the UID of the marker.
   * arController.loadMultiMarker(markerURL, onSuccess, onError);
   * @param {string} markerURL - The URL of the multimarker pattern file to load.
   * @param {function} onSuccess - The success callback. Called with the id and the
   * number of sub-markers of the loaded marker on a successful load.
   * @param {function} onError - The error callback. Called with the encountered error if the load fails.
   */
  public loadMultiMarker(
    markerURL: string,
    onSuccess: () => any,
    onError: () => any
  ) {
    // Asynchronous: the result is only delivered through the callbacks.
    return ARToolKit.addMultiMarker(this.id, markerURL, onSuccess, onError);
  }
  /**
   * Populates the provided float array with the current transformation for the specified marker. After
   * a call to detectMarker, all marker information will be current. Marker transformations can then be
   * checked.
   * @param {number} markerUID The unique identifier (UID) of the marker to query
   * @param {number} markerWidth The width of the marker
   * @param {Float64Array} dst The float array to populate with the 3x4 marker transformation matrix
   * @return {Float64Array} The dst array.
   */
  public getTransMatSquare(
    markerUID: number,
    markerWidth: number,
    dst: Float64Array
  ) {
    // The native call writes its result into the shared markerTransformMat buffer.
    ARToolKit.getTransMatSquare(this.id, markerUID, markerWidth);
    dst.set(this.markerTransformMat);
    return dst;
  }
  /**
   * Populates the provided float array with the current transformation for the specified marker, using
   * previousMarkerTransform as the previously detected transformation. After
   * a call to detectMarker, all marker information will be current. Marker transformations can then be
   * checked.
   * @param {number} markerUID The unique identifier (UID) of the marker to query
   * @param {number} markerWidth The width of the marker
   * @param {Float64Array} previousMarkerTransform The float array to use as the previous
   * 3x4 marker transformation matrix
   * @param {Float64Array} dst The float array to populate with the 3x4 marker transformation matrix
   * @return {Float64Array} The dst array.
   */
  public getTransMatSquareCont(
    markerUID: number,
    markerWidth: number,
    previousMarkerTransform: Float64Array,
    dst: Float64Array
  ) {
    // The previous transform is passed to the native side through the shared buffer,
    // which the native call then overwrites with the new estimate.
    this.markerTransformMat.set(previousMarkerTransform);
    ARToolKit.getTransMatSquareCont(this.id, markerUID, markerWidth);
    dst.set(this.markerTransformMat);
    return dst;
  }
  /**
   * Populates the provided float array with the current transformation for the specified multimarker. After
   * a call to detectMarker, all marker information will be current. Marker transformations can then be
   * checked.
   *
   * @param {number} markerUID The unique identifier (UID) of the marker to query
   * @param {Float64Array} dst The float array to populate with the 3x4 marker transformation matrix
   * @return {Float64Array} The dst array.
   */
  public getTransMatMultiSquare(markerUID: number, dst: Float64Array) {
    // The native call writes its result into the shared markerTransformMat buffer.
    ARToolKit.getTransMatMultiSquare(this.id, markerUID);
    dst.set(this.markerTransformMat);
    return dst;
  }
/**
* Populates the provided float array with the current robust transformation for the specified multimarker. After
* a call to detectMarker, all marker information will be current. Marker transformations can then be
* checked.
* @param {number} markerUID The unique identifier (UID) of the marker to query
* @param {Float64Array} dst The float array to populate with the 3x4 marker transformation matrix
* @return {Float64Array} The dst array.
*/
public getTransMatMultiSquareRobust(markerUID: number, dst: Float64Array) {
ARToolKit.getTransMatMultiSquare(this.id, markerUID);
dst.set(this.markerTransformMat);
return dst;
}
/**
* Converts the given 3x4 marker transformation matrix in the 12-element transMat array
* into a 4x4 WebGL matrix and writes the result into the 16-element glMat array.
* If scale parameter is given, scales the transform of the glMat by the scale parameter.
* @param {Float64Array} transMat The 3x4 marker transformation matrix.
* @param {Float64Array} glMat The 4x4 GL transformation matrix.
* @param {number} [scale] The scale for the transform.
*/
public transMatToGLMat(
transMat: Float64Array,
glMat: Float64Array,
scale?: number
) {
glMat[0 + 0 * 4] = transMat[0]; // R1C1
glMat[0 + 1 * 4] = transMat[1]; // R1C2
glMat[0 + 2 * 4] = transMat[2];
glMat[0 + 3 * 4] = transMat[3];
glMat[1 + 0 * 4] = transMat[4]; // R2
glMat[1 + 1 * 4] = transMat[5];
glMat[1 + 2 * 4] = transMat[6];
glMat[1 + 3 * 4] = transMat[7];
glMat[2 + 0 * 4] = transMat[8]; // R3
glMat[2 + 1 * 4] = transMat[9];
glMat[2 + 2 * 4] = transMat[10];
glMat[2 + 3 * 4] = transMat[11];
glMat[3 + 0 * 4] = 0.0;
glMat[3 + 1 * 4] = 0.0;
glMat[3 + 2 * 4] = 0.0;
glMat[3 + 3 * 4] = 1.0;
if (scale !== undefined && scale !== 0.0) {
glMat[12] *= scale;
glMat[13] *= scale;
glMat[14] *= scale;
}
return glMat;
}
/**
* This is the core ARToolKit marker detection function. It calls through to a set of
* internal functions to perform the key marker detection steps of binarization and
* labelling, contour extraction, and template matching and/or matrix code extraction.
* Typically, the resulting set of detected markers is retrieved by calling arGetMarkerNum
* to get the number of markers detected and arGetMarker to get an array of ARMarkerInfo
* structures with information on each detected marker, followed by a step in which
* detected markers are possibly examined for some measure of goodness of match (e.g. by
* examining the match confidence value) and pose extraction.
* @param {HTMLImageElement|HTMLVideoElement} [image] to be processed to detect markers.
* @return {number} 0 if the function proceeded without error, or a value less than 0 in case of error.
* A result of 0 does not however, imply any markers were detected.
*/
public detectMarker(imageElement: HTMLImageElement | HTMLVideoElement) {
  // Upload the current frame to the Emscripten heap first; bail out with the
  // -99 sentinel error code if the copy failed (e.g. heap view not allocated).
  const copied = this._copyImageToHeap(imageElement);
  return copied ? ARToolKit.detectMarker(this.id) : -99;
}
/**
* Get the number of markers detected in a video frame.
* @return {number} The number of detected markers in the most recent image passed to arDetectMarker.
* Note that this is actually a count, not an index. A better name for this function would be
* arGetDetectedMarkerCount, but the current name lives on for historical reasons.
*/
public getMarkerNum() {
// Thin proxy to the native arGetMarkerNum; only meaningful after detectMarker().
return ARToolKit.getMarkerNum(this.id);
}
/**
* Get the marker info struct for the given marker index in detected markers.
* Call this.detectMarker first, then use this.getMarkerNum to get the detected marker count.
* The returned object is the global artoolkit.markerInfo object and will be overwritten
* by subsequent calls. If you need to hang on to it, create a copy using this.cloneMarkerInfo();
* Returns undefined if no marker was found.
* A markerIndex of -1 is used to access the global custom marker.
* The fields of the markerInfo struct are:
* @field area Area in pixels of the largest connected region, comprising
* the marker border and regions connected to it. Note that this is
* not the same as the actual onscreen area inside the marker border.
* @field id If pattern detection mode is either pattern mode OR
* matrix but not both, will be marker ID (>= 0) if marker is valid, or -1 if invalid.
* @field idPatt If pattern detection mode includes a pattern mode,
* will be marker ID (>= 0) if marker is valid, or -1 if invalid.
* @field idMatrix If pattern detection mode includes a matrix mode,
* will be marker ID (>= 0) if marker is valid, or -1 if invalid.
* @field dir If pattern detection mode is either pattern mode OR matrix
* but not both, and id != -1, will be marker direction (range 0 to 3, inclusive).
* @field dirPatt If pattern detection mode includes a pattern mode, and
* id != -1, will be marker direction (range 0 to 3, inclusive).
* @field dirMatrix If pattern detection mode includes a matrix mode,
* and id != -1, will be marker direction (range 0 to 3, inclusive).
* @field cf If pattern detection mode is either pattern mode OR
* matrix but not both, will be marker matching confidence (range 0.0 to 1.0 inclusive)
* if marker is valid, or -1.0 if marker is invalid.
* @field cfPatt If pattern detection mode includes a pattern mode,
* will be marker matching confidence (range 0.0 to 1.0 inclusive) if marker is valid, or -1.0 if marker is invalid.
* @field cfMatrix If pattern detection mode includes a matrix mode,
* will be marker matching confidence (range 0.0 to 1.0 inclusive) if marker is valid, or -1.0 if marker is invalid.
* @field pos 2D position (in camera image coordinates, origin at top-left)
* of the centre of the marker.
* @field line Line equations for the 4 sides of the marker.
* @field vertex 2D positions (in camera image coordinates, origin at top-left)
* of the corners of the marker. vertex[(4 - dir)%4][] is the top-left corner of the marker.
* Other vertices proceed clockwise from this. These are idealised coordinates
* (i.e. the onscreen position aligns correctly with the undistorted camera image.)
* @param {number} markerIndex The index of the marker to query.
* @returns {Object} The markerInfo struct.
*/
public getMarker(markerIndex: number) {
  // The native call returns 0 on success; only then is the shared
  // markerInfo struct valid. Otherwise return undefined (no marker).
  const status = ARToolKit.getMarker(this.id, markerIndex);
  if (status !== 0) {
    return undefined;
  }
  return ARToolKit.getMarkerInfo();
}
/**
* Set marker vertices to the given vertexData[4][2] array.
* Sets the marker pos to the center of the vertices.
* Useful for building custom markers for getTransMatSquare.
* A markerIndex of -1 is used to access the global custom marker.
* @param {number} markerIndex The index of the marker to edit.
* @param {*} vertexData
*/
public setMarkerInfoVertex(markerIndex: number, vertexData: any) {
  // Flatten the [4][2] vertex array into the shared transform buffer that the
  // Emscripten side reads the vertices from, then commit on the native side.
  let offset = 0;
  for (const vertex of vertexData) {
    this.markerTransformMat[offset++] = vertex[0];
    this.markerTransformMat[offset++] = vertex[1];
  }
  return ARToolKit.setMarkerInfoVertex(this.id, markerIndex);
}
/**
* Makes a deep copy of the given marker info.
* @param {Object} markerInfo The marker info object to copy.
* @return {Object} The new copy of the marker info.
*/
public cloneMarkerInfo(markerInfo: any) {
// JSON round-trip gives a cheap deep copy; adequate here because marker info
// holds only numbers and plain arrays/objects (no functions or typed arrays).
return JSON.parse(JSON.stringify(markerInfo));
}
/**
* Get the marker info struct for the given marker index in detected markers.
* Call this.detectMarker first, then use this.getMarkerNum to get the detected marker count.
* The returned object is the global artoolkit.markerInfo object and will be overwritten
* by subsequent calls. If you need to hang on to it, create a copy using this.cloneMarkerInfo();
* Returns undefined if no marker was found.
* @field {number} pattId The index of the marker.
* @field {number} pattType The type of the marker.
* Either AR_MULTI_PATTERN_TYPE_TEMPLATE or AR_MULTI_PATTERN_TYPE_MATRIX.
* @field {number} visible 0 or larger if the marker is visible
* @field {number} width The width of the marker.
* @param {number} multiMarkerId The multimarker to query.
* @param {number} markerIndex The index of the marker to query.
* @returns {Object} The markerInfo struct.
*/
public getMultiEachMarker(multiMarkerId: number, markerIndex: number) {
  // 0 means success; only then does the shared multiEachMarkerInfo struct
  // hold valid data. Otherwise return undefined.
  const status = ARToolKit.getMultiEachMarker(this.id, multiMarkerId, markerIndex);
  return status === 0 ? ARToolKit.multiEachMarkerInfo : undefined;
}
/**
* Returns the 16-element WebGL transformation matrix used by ARController.process to
* pass marker WebGL matrices to event listeners.
* Unique to each ARController.
* @return {Float64Array} The 16-element WebGL transformation matrix used by the ARController.
*/
public getTransformationMatrix() {
// Returns the live matrix, not a copy — subsequent processing overwrites it.
return this.transformMat;
}
/**
* Returns the projection matrix computed from camera parameters for the ARController.
*
* @return {Float64Array} The 16-element WebGL camera matrix for the ARController camera parameters.
*/
public getCameraMatrix(): Float64Array {
// Live Float64Array view over the Emscripten heap (created in _initialize).
return this.cameraMat;
}
/**
* Returns the shared ARToolKit 3x4 marker transformation matrix, used for passing and receiving
* marker transforms to/from the Emscripten side.
* @return {Float64Array} The 12-element 3x4 row-major marker transformation matrix used by ARToolKit.
*/
public getMarkerTransformationMatrix(): Float64Array {
// Live 12-element view over the Emscripten heap shared with native code.
return this.markerTransformMat;
}
/* Setter / Getter Proxies */
/**
* Enables or disables debug mode in the tracker. When enabled, a black and white debug
* image is generated during marker detection. The debug image is useful for visualising
* the binarization process and choosing a threshold value.
* @param {boolean} mode true to enable debug mode, false to disable debug mode
* @see getDebugMode()
*/
public setDebugMode(mode: number) {
// NOTE(review): the JSDoc documents a boolean but the signature takes a
// number — presumably the native binding expects 0/1; confirm with callers.
return ARToolKit.setDebugMode(this.id, mode);
}
/**
* Returns whether debug mode is currently enabled.
* @return {boolean} true when debug mode is enabled, false when debug mode is disabled
* @see setDebugMode()
*/
public getDebugMode(): boolean {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getDebugMode(this.id);
}
/**
* Returns the Emscripten HEAP offset to the debug processing image used by ARToolKit.
*
* @return {number} HEAP offset to the debug processing image.
*/
public getProcessingImage() {
// Returns a HEAP offset (pointer), not pixel data; see debugDraw for usage of
// heap-backed debug images.
return ARToolKit.getProcessingImage(this.id);
}
/**
* Sets the logging level to use by ARToolKit.
*
* //TODOC
* @param mode
*/
public setLogLevel(mode: any) {
// Global setting: note no this.id is passed, so this affects all controllers.
return ARToolKit.setLogLevel(mode);
}
/**
* //TODOC
* @returns {*}
*/
public getLogLevel() {
// Global getter — not scoped to this controller (no this.id passed).
return ARToolKit.getLogLevel();
}
/**
* //TODOC
* @param markerIndex
* @param dir
* @returns {*}
*/
public setMarkerInfoDir(markerIndex: number, dir: any) {
// Sets the direction field of the detected marker at markerIndex.
return ARToolKit.setMarkerInfoDir(this.id, markerIndex, dir);
}
/**
* //TODOC
* @param value
* @returns {*}
*/
public setProjectionNearPlane(value: any) {
// Near clip distance used when the projection (camera) matrix is computed.
return ARToolKit.setProjectionNearPlane(this.id, value);
}
/**
* //TODOC
* @returns {*}
*/
public getProjectionNearPlane() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getProjectionNearPlane(this.id);
}
/**
* //TODOC
* @param value
* @returns {*}
*/
public setProjectionFarPlane(value: any) {
// Far clip distance used when the projection (camera) matrix is computed.
return ARToolKit.setProjectionFarPlane(this.id, value);
}
/**
* //TODOC
* @returns {*}
*/
public getProjectionFarPlane() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getProjectionFarPlane(this.id);
}
/**
* Set the labeling threshold mode (auto/manual).
* @param {number} mode An integer specifying the mode. One of:
* AR_LABELING_THRESH_MODE_MANUAL,
* AR_LABELING_THRESH_MODE_AUTO_MEDIAN,
* AR_LABELING_THRESH_MODE_AUTO_OTSU,
* AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE,
* AR_LABELING_THRESH_MODE_AUTO_BRACKETING
*/
public setThresholdMode(mode: any) {
// Selects manual vs. automatic threshold computation (see JSDoc constants).
return ARToolKit.setThresholdMode(this.id, mode);
}
/**
* Gets the current threshold mode used for image binarization.
* @return {number} The current threshold mode
* @see getVideoThresholdMode()
*/
public getThresholdMode() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getThresholdMode(this.id);
}
/**
* Set the labeling threshhold.
* This function forces sets the threshold value.
* The default value is AR_DEFAULT_LABELING_THRESH which is 100.
* The current threshold mode is not affected by this call.
* Typically, this function is used when labeling threshold mode
* is AR_LABELING_THRESH_MODE_MANUAL.
* The threshold value is not relevant if threshold mode is
* AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE.
*
* Background: The labeling threshold is the value which
* the AR library uses to differentiate between black and white
* portions of an ARToolKit marker. Since the actual brightness,
* contrast, and gamma of incoming images can vary signficantly
* between different cameras and lighting conditions, this
* value typically needs to be adjusted dynamically to a
* suitable midpoint between the observed values for black
* and white portions of the markers in the image.
*
* @param {number} threshold An integer in the range [0,255] (inclusive).
*/
public setThreshold(threshold: number) {
// Forces the binarization threshold; relevant mainly in MANUAL threshold mode.
return ARToolKit.setThreshold(this.id, threshold);
}
/**
* Get the current labeling threshold.
*
* This function queries the current labeling threshold. For,
* AR_LABELING_THRESH_MODE_AUTO_MEDIAN, AR_LABELING_THRESH_MODE_AUTO_OTSU,
* and AR_LABELING_THRESH_MODE_AUTO_BRACKETING
* the threshold value is only valid until the next auto-update.
*
* The current threshold mode is not affected by this call.
*
* The threshold value is not relevant if threshold mode is
* AR_LABELING_THRESH_MODE_AUTO_ADAPTIVE.
*
* @return {number} The current threshold value.
*/
public getThreshold() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getThreshold(this.id);
}
/**
* Set the pattern detection mode
*
* The pattern detection determines the method by which ARToolKit
* matches detected squares in the video image to marker templates
* and/or IDs. ARToolKit v4.x can match against pictorial "template" markers,
* whose pattern files are created with the mk_patt utility, in either colour
* or mono, and additionally can match against 2D-barcode-type "matrix"
* markers, which have an embedded marker ID. Two different two-pass modes
* are also available, in which a matrix-detection pass is made first,
* followed by a template-matching pass.
*
* @param {number} mode
* Options for this field are:
* AR_TEMPLATE_MATCHING_COLOR
* AR_TEMPLATE_MATCHING_MONO
* AR_MATRIX_CODE_DETECTION
* AR_TEMPLATE_MATCHING_COLOR_AND_MATRIX
* AR_TEMPLATE_MATCHING_MONO_AND_MATRIX
* The default mode is AR_TEMPLATE_MATCHING_COLOR.
*/
public setPatternDetectionMode(mode: number) {
// Selects template, matrix (barcode), or combined marker matching.
return ARToolKit.setPatternDetectionMode(this.id, mode);
}
/**
* Returns the current pattern detection mode.
* @return {number} The current pattern detection mode.
*/
public getPatternDetectionMode() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getPatternDetectionMode(this.id);
}
/**
* Set the size and ECC algorithm to be used for matrix code (2D barcode) marker detection.
*
* When matrix-code (2D barcode) marker detection is enabled (see arSetPatternDetectionMode)
* then the size of the barcode pattern and the type of error checking and correction (ECC)
* with which the markers were produced can be set via this function.
*
* This setting is global to a given ARHandle; It is not possible to have two different matrix
* code types in use at once.
*
* @param type The type of matrix code (2D barcode) in use. Options include:
* AR_MATRIX_CODE_3x3
* AR_MATRIX_CODE_3x3_HAMMING63
* AR_MATRIX_CODE_3x3_PARITY65
* AR_MATRIX_CODE_4x4
* AR_MATRIX_CODE_4x4_BCH_13_9_3
* AR_MATRIX_CODE_4x4_BCH_13_5_5
* The default mode is AR_MATRIX_CODE_3x3.
*/
public setMatrixCodeType(type: any) {
// Sets barcode size/ECC scheme; global per ARHandle (only one type at a time).
return ARToolKit.setMatrixCodeType(this.id, type);
}
/**
* Returns the current matrix code (2D barcode) marker detection type.
* @return {number} The current matrix code type.
*/
public getMatrixCodeType() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getMatrixCodeType(this.id);
}
/**
* Select between detection of black markers and white markers.
*
* ARToolKit's labelling algorithm can work with both black-bordered
* markers on a white background (AR_LABELING_BLACK_REGION) or
* white-bordered markers on a black background (AR_LABELING_WHITE_REGION).
* This function allows you to specify the type of markers to look for.
* Note that this does not affect the pattern-detection algorith
* which works on the interior of the marker.
*
* @param {number} mode
* Options for this field are:
* AR_LABELING_WHITE_REGION
* AR_LABELING_BLACK_REGION
* The default mode is AR_LABELING_BLACK_REGION.
*/
public setLabelingMode(mode: any) {
// Chooses black-on-white vs. white-on-black marker border detection.
return ARToolKit.setLabelingMode(this.id, mode);
}
/**
* Enquire whether detection is looking for black markers or white markers.
* See discussion for setLabelingMode.
* @result {number} The current labeling mode.
*/
public getLabelingMode() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getLabelingMode(this.id);
}
/**
* Set the width/height of the marker pattern space, as a proportion of marker width/height.
* @param {number} pattRatio The the width/height of the marker pattern space, as a proportion of marker
* width/height. To set the default, pass AR_PATT_RATIO.
* If compatibility with ARToolKit verions 1.0 through 4.4 is required, this value
* must be 0.5.
*/
public setPattRatio(pattRatio: number) {
// Pattern area as a proportion of marker size; 0.5 for ARToolKit 1.0-4.4 compat.
return ARToolKit.setPattRatio(this.id, pattRatio);
}
/**
* Returns the current ratio of the marker pattern to the total marker size.
* @return {number} The current pattern ratio.
*/
public getPattRatio(): number {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getPattRatio(this.id);
}
/**
* Set the image processing mode.
*
* When the image processing mode is AR_IMAGE_PROC_FRAME_IMAGE,
* ARToolKit processes all pixels in each incoming image
* to locate markers. When the mode is AR_IMAGE_PROC_FIELD_IMAGE,
* ARToolKit processes pixels in only every second pixel row and
* column. This is useful both for handling images from interlaced
* video sources (where alternate lines are assembled from alternate
* fields and thus have one field time-difference, resulting in a
* "comb" effect) such as Digital Video cameras.
* The effective reduction by 75% in the pixels processed also
* has utility in accelerating tracking by effectively reducing
* the image size to one quarter size, at the cost of pose accuraccy.
*
* @param {number} mode
* Options for this field are:
* AR_IMAGE_PROC_FRAME_IMAGE
* AR_IMAGE_PROC_FIELD_IMAGE
* The default mode is AR_IMAGE_PROC_FRAME_IMAGE.
*/
public setImageProcMode(mode: any) {
// Full-frame vs. every-second-row/column processing (see JSDoc above).
return ARToolKit.setImageProcMode(this.id, mode);
}
/**
* Get the image processing mode.
* See arSetImageProcMode() for a complete description.
* @return {number} The current image processing mode.
*/
public getImageProcMode() {
// Thin proxy to the native binding for this controller instance.
return ARToolKit.getImageProcMode(this.id);
}
/**
* Draw the black and white image and debug markers to the ARController canvas.
* See setDebugMode.
*/
public debugDraw() {
  // Wrap the native binarized debug image living on the Emscripten heap
  // without copying it, then blit it onto the controller's canvas.
  const heapView = new Uint8ClampedArray(
    ARToolKit.HEAPU8.buffer,
    this._bwpointer,
    this.framesize
  );
  const frame = new ImageData(heapView, this.canvas.width, this.canvas.height);
  if (!this.ctx) {
    throw Error(this.contextError);
  }
  this.ctx.putImageData(frame, 0, 0);
  // Overlay an outline and centre dot for every marker from the last frame.
  const markerCount = this.getMarkerNum();
  for (let index = 0; index < markerCount; index++) {
    this._debugMarker(this.getMarker(index));
  }
}
private _initialize() {
  // Register this controller with the native side; the returned id keys all
  // subsequent ARToolKit calls made by this instance.
  this.id = ARToolKit.setup(
    this.canvas.width,
    this.canvas.height,
    this.cameraParam.id
  );
  const params = ARToolKit.getFrameMalloc();
  if (!params) {
    throw Error("frameMalloc not set");
  }
  // Create typed-array views directly over the Emscripten heap so the frame
  // pixels, camera matrix and marker transform are shared with native code
  // without per-frame copies.
  this.framepointer = params.framepointer;
  this.framesize = params.framesize;
  this.dataHeap = new Uint8Array(
    ARToolKit.HEAPU8.buffer,
    this.framepointer,
    this.framesize
  );
  this.cameraMat = new Float64Array(ARToolKit.HEAPU8.buffer, params.camera, 16);
  this.markerTransformMat = new Float64Array(
    ARToolKit.HEAPU8.buffer,
    params.transform,
    12
  );
  this.setProjectionNearPlane(0.1);
  this.setProjectionFarPlane(1000);
  // Defer the load notification a tick so listeners attached right after
  // construction still receive it.
  setTimeout(() => {
    if (this.onload) {
      this.onload();
    }
    this.dispatchEvent({ name: "load", target: this });
  }, 0);
}
/**
* //TODOC
*
* @param {HTMLImageElement|HTMLVideoElement} [image]
* @returns {boolean}
* @private
*/
private _copyImageToHeap(image: HTMLImageElement | HTMLVideoElement) {
if (this.ctx === null) {
throw Error(this.contextError);
}
if (!image) {
image = this.image;
}
if (
(image instanceof HTMLImageElement && image.width > image.height) ||
(image instanceof HTMLVideoElement &&
image.videoWidth > image.videoHeight)
) {
// if landscape
this.ctx.drawImage(image, 0, 0, this.canvas.width, this.canvas.height); // draw video
} else {
// if portrait
this.ctx.clearRect(0, 0, this.canvas.width, this.canvas.height);
const scale = this.canvas.height / this.canvas.width;
const scaledHeight = this.canvas.width * scale;
const scaledWidth = this.canvas.height * scale;
const marginLeft = (this.canvas.width - scaledWidth) / 2;
this.ctx.drawImage(image, marginLeft, 0, scaledWidth, scaledHeight); // draw video
}
const imageData = this.ctx.getImageData(
0,
0,
this.canvas.width,
this.canvas.height
);
const data = imageData.data;
if (this.dataHeap) {
this.dataHeap.set(data);
return true;
}
return false;
}
/**
* //TODOC
*
* @param marker
* @private
*/
/**
 * Draws a detected marker onto the canvas for debugging: sides 0-1 and 2-3
 * in red, sides 1-2 and 3-0 in green, and a red dot at the marker centre.
 * @param marker A markerInfo struct (uses its vertex[4][2] and pos fields).
 * @throws if the canvas 2D context is unavailable.
 * @private
 */
private _debugMarker(marker: any) {
  if (this.ctx === null) {
    throw Error(this.contextError);
  }
  const ctx = this.ctx;
  const vertex = marker.vertex;
  ctx.strokeStyle = "red";
  ctx.beginPath();
  ctx.moveTo(vertex[0][0], vertex[0][1]);
  ctx.lineTo(vertex[1][0], vertex[1][1]);
  ctx.stroke();
  ctx.beginPath();
  ctx.moveTo(vertex[2][0], vertex[2][1]);
  ctx.lineTo(vertex[3][0], vertex[3][1]);
  ctx.stroke();
  ctx.strokeStyle = "green";
  ctx.beginPath();
  // Fixed for consistency: this was lineTo, which only worked because lineTo
  // on an empty subpath degenerates to moveTo per the canvas spec.
  ctx.moveTo(vertex[1][0], vertex[1][1]);
  ctx.lineTo(vertex[2][0], vertex[2][1]);
  ctx.stroke();
  ctx.beginPath();
  ctx.moveTo(vertex[3][0], vertex[3][1]);
  ctx.lineTo(vertex[0][0], vertex[0][1]);
  ctx.stroke();
  const pos = marker.pos;
  ctx.beginPath();
  ctx.arc(pos[0], pos[1], 8, 0, Math.PI * 2);
  ctx.fillStyle = "red";
  ctx.fill();
}
}
export default ARToolKitController;
import { Resource } from '../../resource';
import { IDataCollection } from '../../interfaces/data-collection';
import { IDocumentResource } from '../../interfaces/data-object';
import { IDocumentData } from '../../interfaces/document';
import { IDataResource } from '../../interfaces/data-resource';
import { DocumentCollection } from '../../document-collection';
import { DocumentResource } from '../../document-resource';
import { Author } from './authors.service';
import { Book } from './books.service';
import { Photo } from './photos.service';
import * as faker from 'faker';
export class TestFactory {
// NOTE: this is going to be used to merge getAuthor, getBook and getPhoto in 1 method
private static resource_classes_by_type = {
photos: Photo,
books: Book,
authors: Author
};
public static getResourceDocumentData(document_class: typeof Resource, include: Array<string> = [], id?: string): IDocumentData {
let main_resource: Resource = this[`get${document_class.name}`](id, include);
let document_data: IDocumentData = main_resource.toObject();
TestFactory.fillDocumentDataIncludedRelatioships(document_data, include);
return document_data;
}
public static getCollectionDocumentData(document_class: typeof Resource, size = 2, include: Array<string> = []): IDocumentData {
let main_collection: DocumentCollection = this.getCollection(document_class, size, include);
let document_data: IDocumentData = main_collection.toObject();
TestFactory.fillDocumentDataIncludedRelatioships(document_data, include);
return document_data;
}
public static getBookDocumentData() {
/**/
}
public static getPhotoDocumentData() {
/**/
}
// TODO: uncomment and complete when thinking a way to find a resource relationship's class
// public static getResource(resource_class: typeof Resource, id?: string, include: Array<string> = [], ttl = 0): Resource {
// // NOTE: create book
// let resource: Resource = new resource_class();
// resource.id = this.getId(id);
// resource.attributes.title = faker.name.title();
//
// // NOTE: add author
// (<IDataResource>resource.relationships.author.data) = this.getDataResourceWithType('authors');
// if (include.includes('author')) {
// this.includeHasOneFromService(resource, 'author', Photo);
// }
//
// // NOTE: add photos
// (resource.relationships.photos.data as Array<IDataResource>).concat(this.getDataResourcesWithType('photos', 2));
// if (include.includes('photos')) {
// this.includeHasManyFromService(resource, 'photos', Photo);
// }
//
// return resource;
// }
public static getBook(id?: string, include: Array<string> = [], ttl = 0): Book {
// NOTE: create book
let book: Book = new Book();
book.id = this.getId(id);
book.ttl = ttl;
TestFactory.fillBookAttributes(book);
// NOTE: add author
(<IDataResource>book.relationships.author.data) = this.getDataResourceWithType('authors');
if (include.includes('author')) {
this.includeFromService(book, 'author', Author);
}
// NOTE: add photos
book.relationships.photos.data = book.relationships.photos.data.concat(<Array<Photo>>this.getDataResourcesWithType('photos', 2));
if (include.includes('photos')) {
this.includeFromService(book, 'photos', Photo);
}
return book;
}
public static getAuthor(id?: string, include: Array<string> = [], ttl = 0): Author {
// NOTE: create author
let author: Author = new Author();
author.id = this.getId(id);
author.ttl = ttl;
TestFactory.fillAuthorAttributes(author);
// NOTE: add books
author.relationships.books.data = author.relationships.books.data.concat(<Array<Book>>this.getDataResourcesWithType('books', 2));
if (include.includes('books')) {
this.includeFromService(author, 'books', Book);
for (let book of author.relationships.books.data) {
(<Resource>book.relationships.author.data).id = author.id;
}
}
// NOTE: add photos
author.relationships.photos.data = author.relationships.photos.data.concat(<Array<Photo>>(
this.getDataResourcesWithType('photos', 2)
));
if (include.includes('photos')) {
this.includeFromService(author, 'photos', Photo);
}
return author;
}
public static getPhoto(id?: string, include: Array<string> = [], ttl = 0): Photo {
let photo: Photo = new Photo();
photo.id = this.getId(id);
photo.ttl = ttl;
TestFactory.fillPhotoAttirbutes(photo);
return photo;
}
public static getCollection(resources_class: typeof Resource, size: number = 2, include: Array<string> = []): DocumentCollection {
let collection: DocumentCollection = new DocumentCollection();
for (let index = 0; index < size; index++) {
let factory_name = `get${resources_class.name}`;
let resource = this[factory_name](undefined, include);
collection.data.push(resource);
}
collection.setBuilded(true);
collection.setLoaded(true);
collection.cache_last_update = Date.now();
return collection;
}
// TODO: create a dynamic attribute filler by data type and merge 3 methods in 1
private static fillAuthorAttributes(author: Author): Author {
author.attributes.name = faker.name.firstName();
author.attributes.date_of_birth = faker.date.past();
author.attributes.date_of_death = faker.date.past();
author.attributes.created_at = faker.date.past();
author.attributes.updated_at = faker.date.past();
return author;
}
// TODO: create a dynamic attribute filler by data type and merge 3 methods in 1
private static fillBookAttributes(book: Book): Book {
book.attributes.title = faker.name.title();
book.attributes.date_published = faker.date.past();
book.attributes.created_at = faker.date.past();
book.attributes.updated_at = faker.date.past();
return book;
}
// TODO: create a dynamic attribute filler by data type and merge 3 methods in 1
private static fillPhotoAttirbutes(book: Photo): Photo {
book.attributes.title = faker.name.title();
book.attributes.uri = faker.internet.url();
book.attributes.imageable_id = faker.random.uuid();
book.attributes.created_at = faker.date.past();
book.attributes.updated_at = faker.date.past();
return book;
}
private static getId(id?: string) {
return id || 'new_' + Math.floor(Math.random() * 10000).toString();
}
private static includeFromService(resource: Resource, relationship_alias: string, class_to_add: typeof Resource) {
let relationship = resource.relationships[relationship_alias];
if (!relationship) {
console.error(`${relationship_alias} relationship doesn't exist in ${resource.type}`);
return;
} else if (relationship.data && 'id' in relationship.data) {
this.includeHasOneFromService(resource, relationship_alias, class_to_add);
} else if (relationship instanceof DocumentCollection) {
this.includeHasManyFromService(resource, relationship_alias, class_to_add);
}
}
private static includeHasOneFromService(resource: Resource, relationship_alias: string, class_to_add: typeof Resource) {
let resource_to_add: Resource = new class_to_add();
let relationship = <DocumentResource>resource.relationships[relationship_alias];
if (!relationship || !relationship.data) {
return;
}
resource_to_add.id = relationship.data.id;
let fill_method = `fill${class_to_add.name}Attributes`;
TestFactory[fill_method](resource_to_add);
resource.addRelationship(resource_to_add, relationship_alias);
}
private static includeHasManyFromService(resource: Resource, relationship_alias: string, class_to_add: typeof Resource) {
let resources_to_add: Array<Resource> = [];
for (let resource_relatioship of (<DocumentCollection>resource.relationships[relationship_alias]).data) {
let resource_to_add: Resource = new class_to_add();
resource_to_add.id = resource_relatioship.id;
let fill_method = `fill${class_to_add.name}Attributes`;
TestFactory[fill_method](resource_to_add);
resources_to_add.push(resource_to_add);
}
// @TODO: cannot use addRelationships because its not working here... SHOULD BE FIXED
// resource.addRelationships(resources_to_add, relationship_alias);
resource.relationships[relationship_alias].data = resources_to_add;
}
private static getDataResourceWithType(type: string, id?: string): IDataResource {
return {
id: this.getId(id),
type: type
};
}
private static getDataResourcesWithType(type: string, qty: number): Array<IDataResource> {
let data_resources: Array<IDataResource> = [];
for (let index = 0; index < qty; index++) {
data_resources.push(this.getDataResourceWithType(type));
}
return data_resources;
}
// @TODO: this method was adapted after adding toObject in server mocks... check if its 100% OK
private static fillResourceRelationshipsInDocumentData(document_data: IDocumentData, resource: Resource, included_alias: string) {
if (!document_data.included) {
document_data.included = [];
}
let relationship_content: DocumentResource | DocumentCollection | IDocumentResource | IDataCollection =
resource.relationships[included_alias];
// @NOTE: cannot check IDocumentResource interface with instanceof
if (relationship_content instanceof DocumentResource || 'type' in relationship_content.data) {
let relation_data = (<DocumentResource | IDocumentResource>relationship_content).data;
if (!relation_data) {
console.warn('relationship content is empty');
return;
}
let resource_class = TestFactory.resource_classes_by_type[relation_data.type];
if (!resource_class) {
console.warn(`cannot find the required class for type ${relation_data.type}`);
return;
}
document_data.included.push(
// @TODO: improve this code... should avoid forced types and ts errors...
this[`get${resource_class.name}`](relation_data.id)
);
// @NOTE: cannot check IDataResource interface with instanceof
} else if (relationship_content instanceof DocumentCollection) {
for (let has_many_relationship of (<DocumentCollection>resource.relationships[included_alias]).data) {
document_data.included.push(
this[`get${TestFactory.resource_classes_by_type[has_many_relationship.type].name}`](has_many_relationship.id)
);
}
}
}
private static fillDocumentDataIncludedRelatioships(document_data: IDocumentData, include: Array<string>) {
for (let included_alias of include) {
if (!document_data.included) {
document_data.included = [];
}
if ((<Resource>document_data.data).id) {
if (!(<Resource>document_data.data).relationships[included_alias].data) {
continue;
}
TestFactory.fillResourceRelationshipsInDocumentData(document_data, <Resource>document_data.data, included_alias);
return;
}
for (let resource of <Array<Resource>>document_data.data) {
TestFactory.fillResourceRelationshipsInDocumentData(document_data, resource, included_alias);
}
}
}
} | the_stack |
//@ts-check
///<reference path="devkit.d.ts" />
declare namespace DevKit {
namespace FormPhone_Call {
// Generated control typings for the "Phone Call" main form.
interface Header extends DevKit.Controls.IHeader {
/** Enter the user or team who is assigned to manage the record. This field is updated every time the record is assigned to a different user. */
OwnerId: DevKit.Controls.Lookup;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.Controls.OptionSet;
/** Enter the expected due date and time. */
ScheduledEnd: DevKit.Controls.DateTime;
/** Shows whether the phone call is open, completed, or canceled. Completed and canceled phone calls are read-only and can't be edited. */
StateCode: DevKit.Controls.OptionSet;
}
// Sections belonging to the "phonecall" tab.
interface tab_phonecall_Sections {
general_information: DevKit.Controls.Section;
phone_call_description: DevKit.Controls.Section;
phone_call_details: DevKit.Controls.Section;
tab_2_section_2: DevKit.Controls.Section;
}
interface tab_phonecall extends DevKit.Controls.ITab {
Section: tab_phonecall_Sections;
}
// All tabs present on this form.
interface Tabs {
phonecall: tab_phonecall;
}
// Body controls of the form (fields outside the header).
interface Body {
Tab: Tabs;
/** Type the number of minutes spent on the phone call. The duration is used in reporting. */
ActualDurationMinutes: DevKit.Controls.Integer;
/** Type additional information to describe the phone call, such as the primary message or the products and services discussed. */
Description: DevKit.Controls.String;
/** Select the direction of the phone call as incoming or outbound. */
DirectionCode: DevKit.Controls.Boolean;
/** Enter the account, contact, lead, or user who made the phone call. */
from: DevKit.Controls.Lookup;
/** Type the phone number. */
PhoneNumber: DevKit.Controls.String;
/** Unique identifier of the object with which the phone call activity is associated. */
RegardingObjectId: DevKit.Controls.Lookup;
/** Type a short description about the objective or primary topic of the phone call. */
Subject: DevKit.Controls.String;
/** Enter the account, contact, lead, or user recipients of the phone call. */
to: DevKit.Controls.Lookup;
}
}
class FormPhone_Call extends DevKit.IForm {
/**
* DynamicsCrm.DevKit form Phone_Call
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Phone_Call */
Body: DevKit.FormPhone_Call.Body;
/** The Header section of form Phone_Call */
Header: DevKit.FormPhone_Call.Header;
}
namespace FormPhone_Call_for_Interactive_experience {
/** Header controls of the form. */
interface Header extends DevKit.Controls.IHeader {
/** Enter the user or team who is assigned to manage the record. This field is updated every time the record is assigned to a different user. */
OwnerId: DevKit.Controls.Lookup;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.Controls.OptionSet;
/** Enter the expected due date and time. */
ScheduledEnd: DevKit.Controls.DateTime;
/** Shows whether the phone call is open, completed, or canceled. Completed and canceled phone calls are read-only and can't be edited. */
StateCode: DevKit.Controls.OptionSet;
}
/** Sections of the 'tab_2' tab. */
interface tab_tab_2_Sections {
tab_2_section_1: DevKit.Controls.Section;
tab_2_section_2: DevKit.Controls.Section;
tab_2_section_4: DevKit.Controls.Section;
}
/** The 'tab_2' tab and its sections. */
interface tab_tab_2 extends DevKit.Controls.ITab {
Section: tab_tab_2_Sections;
}
/** Tabs available on the form. */
interface Tabs {
tab_2: tab_tab_2;
}
/** Controls in the body of the form. */
interface Body {
Tab: Tabs;
/** Type the number of minutes spent on the phone call. The duration is used in reporting. */
ActualDurationMinutes: DevKit.Controls.Integer;
/** Type additional information to describe the phone call, such as the primary message or the products and services discussed. */
Description: DevKit.Controls.String;
/** Select the direction of the phone call as incoming or outbound. */
DirectionCode: DevKit.Controls.Boolean;
/** Enter the account, contact, lead, or user who made the phone call. */
from: DevKit.Controls.Lookup;
/** Type the phone number. */
PhoneNumber: DevKit.Controls.String;
/** Unique identifier of the object with which the phone call activity is associated. */
RegardingObjectId: DevKit.Controls.Lookup;
/** Unique identifier of the object with which the phone call activity is associated. */
// NOTE(review): the '_1' suffix appears to denote a second control bound to the same attribute — confirm against the form layout.
RegardingObjectId_1: DevKit.Controls.Lookup;
/** Type a short description about the objective or primary topic of the phone call. */
Subject: DevKit.Controls.String;
/** Enter the account, contact, lead, or user recipients of the phone call. */
to: DevKit.Controls.Lookup;
}
}
/** Typed wrapper for the 'Phone Call for Interactive experience' form. */
class FormPhone_Call_for_Interactive_experience extends DevKit.IForm {
/**
* DynamicsCrm.DevKit form Phone_Call_for_Interactive_experience
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Phone_Call_for_Interactive_experience */
Body: DevKit.FormPhone_Call_for_Interactive_experience.Body;
/** The Header section of form Phone_Call_for_Interactive_experience */
Header: DevKit.FormPhone_Call_for_Interactive_experience.Header;
}
namespace FormPhone_call_quick_create_form {
/** Sections of the 'PhoneCall_Tab_1' tab. */
interface tab_PhoneCall_Tab_1_Sections {
PhoneCall_Description: DevKit.Controls.Section;
PhoneCall_Description_2: DevKit.Controls.Section;
PhoneCall_Description_3: DevKit.Controls.Section;
}
/** The 'PhoneCall_Tab_1' tab and its sections. */
interface tab_PhoneCall_Tab_1 extends DevKit.Controls.ITab {
Section: tab_PhoneCall_Tab_1_Sections;
}
/** Tabs available on the form. */
interface Tabs {
PhoneCall_Tab_1: tab_PhoneCall_Tab_1;
}
/** Controls in the body of the form. */
interface Body {
Tab: Tabs;
/** Type the number of minutes spent on the phone call. The duration is used in reporting. */
ActualDurationMinutes: DevKit.Controls.Integer;
/** Type additional information to describe the phone call, such as the primary message or the products and services discussed. */
Description: DevKit.Controls.String;
/** Select the direction of the phone call as incoming or outbound. */
DirectionCode: DevKit.Controls.Boolean;
/** Enter the account, contact, lead, or user who made the phone call. */
from: DevKit.Controls.Lookup;
/** Enter the user or team who is assigned to manage the record. This field is updated every time the record is assigned to a different user. */
OwnerId: DevKit.Controls.Lookup;
/** Type the phone number. */
PhoneNumber: DevKit.Controls.String;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.Controls.OptionSet;
/** Unique identifier of the object with which the phone call activity is associated. */
RegardingObjectId: DevKit.Controls.Lookup;
/** Enter the expected due date and time. */
ScheduledEnd: DevKit.Controls.DateTime;
/** Type a short description about the objective or primary topic of the phone call. */
Subject: DevKit.Controls.String;
/** Enter the account, contact, lead, or user recipients of the phone call. */
to: DevKit.Controls.Lookup;
}
}
/** Typed wrapper for the 'Phone call quick create form'. */
class FormPhone_call_quick_create_form extends DevKit.IForm {
/**
* DynamicsCrm.DevKit form Phone_call_quick_create_form
* @param executionContext the execution context
* @param defaultWebResourceName default resource name. E.g.: "devkit_/resources/Resource"
*/
constructor(executionContext: any, defaultWebResourceName?: string);
/** Utility functions/methods/objects for Dynamics 365 form */
Utility: DevKit.Utility;
/** The Body section of form Phone_call_quick_create_form */
Body: DevKit.FormPhone_call_quick_create_form.Body;
}
/** WebApi wrapper for the phonecall entity: field values, aliased values, and regarding-object lookups. */
class PhoneCallApi {
/**
* DynamicsCrm.DevKit PhoneCallApi
* @param entity The entity object
*/
constructor(entity?: any);
/**
* Get the value of alias
* @param alias the alias value
* @param isMultiOptionSet true if the alias is multi OptionSet
*/
getAliasedValue(alias: string, isMultiOptionSet?: boolean): any;
/**
* Get the formatted value of alias
* @param alias the alias value
* @param isMultiOptionSet true if the alias is multi OptionSet
*/
getAliasedFormattedValue(alias: string, isMultiOptionSet?: boolean): string;
/** The entity object */
Entity: any;
/** The entity name */
EntityName: string;
/** The entity collection name */
EntityCollectionName: string;
/** The @odata.etag is then used to build a cache of the response that is dependant on the fields that are retrieved */
"@odata.etag": string;
/** For internal use only. */
ActivityAdditionalParams: DevKit.WebApi.StringValue;
/** Unique identifier of the phone call activity. */
ActivityId: DevKit.WebApi.GuidValue;
/** Type the number of minutes spent on the phone call. The duration is used in reporting. */
ActualDurationMinutes: DevKit.WebApi.IntegerValue;
/** Enter the actual end date and time of the phone call. By default, it displays the date and time when the activity was completed or canceled, but can be edited to capture the actual duration of the phone call. */
ActualEnd_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Enter the actual start date and time for the phone call. By default, it displays the date and time when the activity was created, but can be edited to capture the actual duration of the phone call. */
ActualStart_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Type a category to identify the phone call type, such as lead gathering or customer follow-up, to tie the phone call to a business group or function. */
Category: DevKit.WebApi.StringValue;
/** Shows who created the record. */
CreatedBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the date and time when the record was created. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */
CreatedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who created the record on behalf of another user. */
CreatedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Type additional information to describe the phone call, such as the primary message or the products and services discussed. */
Description: DevKit.WebApi.StringValue;
/** Select the direction of the phone call as incoming or outbound. */
DirectionCode: DevKit.WebApi.BooleanValue;
/** Shows the conversion rate of the record's currency. The exchange rate is used to convert all money fields in the record from the local currency to the system's default currency. */
ExchangeRate: DevKit.WebApi.DecimalValueReadonly;
/** Unique identifier of the data import or data migration that created this record. */
ImportSequenceNumber: DevKit.WebApi.IntegerValue;
/** Information which specifies whether the phone call activity was billed as part of resolving a case. */
IsBilled: DevKit.WebApi.BooleanValue;
/** Information regarding whether the activity is a regular activity type or event type. */
IsRegularActivity: DevKit.WebApi.BooleanValueReadonly;
/** Indication which specifies if the phone call activity was created by a workflow rule. */
IsWorkflowCreated: DevKit.WebApi.BooleanValue;
/** Contains the date and time stamp of the last on hold time. */
LastOnHoldTime_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Select whether a voice mail was left for the person. */
LeftVoiceMail: DevKit.WebApi.BooleanValue;
/** Shows who last updated the record. */
ModifiedBy: DevKit.WebApi.LookupValueReadonly;
/** Shows the date and time when the record was last updated. The date and time are displayed in the time zone selected in Microsoft Dynamics 365 options. */
ModifiedOn_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValueReadonly;
/** Shows who last updated the record on behalf of another user. */
ModifiedOnBehalfBy: DevKit.WebApi.LookupValueReadonly;
/** Shows how long, in minutes, that the record was on hold. */
OnHoldTime: DevKit.WebApi.IntegerValueReadonly;
/** Date and time that the record was migrated. */
OverriddenCreatedOn_UtcDateOnly: DevKit.WebApi.UtcDateOnlyValue;
/** Enter the user who is assigned to manage the record. This field is updated every time the record is assigned to a different user */
OwnerId_systemuser: DevKit.WebApi.LookupValue;
/** Enter the team who is assigned to manage the record. This field is updated every time the record is assigned to a different team */
OwnerId_team: DevKit.WebApi.LookupValue;
/** Unique identifier of the business unit that owns the phone call activity. */
OwningBusinessUnit: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier of the team that owns the phone call activity. */
OwningTeam: DevKit.WebApi.LookupValueReadonly;
/** Unique identifier of the user that owns the phone call activity. */
OwningUser: DevKit.WebApi.LookupValueReadonly;
/** Type the phone number. */
PhoneNumber: DevKit.WebApi.StringValue;
/** Select the priority so that preferred customers or critical issues are handled quickly. */
PriorityCode: DevKit.WebApi.OptionSetValue;
/** Shows the ID of the process. */
ProcessId: DevKit.WebApi.GuidValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_account_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_bookableresourcebooking_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_bookableresourcebookingheader_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_bulkoperation_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_campaign_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_campaignactivity_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_contact_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_contract_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_entitlement_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_entitlementtemplate_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_incident_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_invoice_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_knowledgearticle_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_knowledgebaserecord_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_lead_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreement_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementbookingdate_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementbookingincident_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementbookingproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementbookingservice_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementbookingservicetask_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementbookingsetup_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementinvoicedate_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementinvoiceproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_agreementinvoicesetup_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_bookingalertstatus_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_bookingrule_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_bookingtimestamp_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_customerasset_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_fieldservicesetting_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_incidenttypecharacteristic_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_incidenttypeproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_incidenttypeservice_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_inventoryadjustment_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_inventoryadjustmentproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_inventoryjournal_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_inventorytransfer_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_payment_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_paymentdetail_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_paymentmethod_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_paymentterm_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_playbookinstance_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_postalbum_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_postalcode_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_processnotes_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_productinventory_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_projectteam_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_purchaseorder_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_purchaseorderbill_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_purchaseorderproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_purchaseorderreceipt_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_purchaseorderreceiptproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_purchaseordersubstatus_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_quotebookingincident_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_quotebookingproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_quotebookingservice_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_quotebookingservicetask_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_resourceterritory_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rma_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rmaproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rmareceipt_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rmareceiptproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rmasubstatus_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rtv_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rtvproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_rtvsubstatus_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_shipvia_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_systemuserschedulersetting_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_timegroup_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_timegroupdetail_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_timeoffrequest_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_warehouse_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workorder_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workordercharacteristic_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workorderincident_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workorderproduct_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workorderresourcerestriction_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workorderservice_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_msdyn_workorderservicetask_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_opportunity_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_quote_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_salesorder_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_site_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_action_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_hostedapplication_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_nonhostedapplication_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_option_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_savedsession_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_workflow_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_workflowstep_phonecall: DevKit.WebApi.LookupValue;
/** Unique identifier of the object with which the phone call activity is associated. */
regardingobjectid_uii_workflow_workflowstep_mapping_phonecall: DevKit.WebApi.LookupValue;
/** Scheduled duration of the phone call activity, specified in minutes. */
ScheduledDurationMinutes: DevKit.WebApi.IntegerValueReadonly;
/** Enter the expected due date and time. */
ScheduledEnd_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Enter the expected due date and time. */
ScheduledStart_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Unique identifier for an associated service. */
ServiceId: DevKit.WebApi.LookupValue;
/** Choose the service level agreement (SLA) that you want to apply to the Phone Call record. */
SLAId: DevKit.WebApi.LookupValue;
/** Last SLA that was applied to this Phone Call. This field is for internal use only. */
SLAInvokedId: DevKit.WebApi.LookupValueReadonly;
/** NOTE(review): undocumented in source; presumably the display name of the applied SLA — confirm against entity metadata. */
SLAName: DevKit.WebApi.StringValueReadonly;
/** Shows the date and time by which the activities are sorted. */
SortDate_UtcDateAndTime: DevKit.WebApi.UtcDateAndTimeValue;
/** Shows the ID of the stage. */
StageId: DevKit.WebApi.GuidValue;
/** Shows whether the phone call is open, completed, or canceled. Completed and canceled phone calls are read-only and can't be edited. */
StateCode: DevKit.WebApi.OptionSetValue;
/** Select the phone call's status. */
StatusCode: DevKit.WebApi.OptionSetValue;
/** Type a subcategory to identify the phone call type and relate the activity to a specific product, sales region, business group, or other function. */
Subcategory: DevKit.WebApi.StringValue;
/** Type a short description about the objective or primary topic of the phone call. */
Subject: DevKit.WebApi.StringValue;
/** For internal use only. */
SubscriptionId: DevKit.WebApi.GuidValue;
/** For internal use only. */
TimeZoneRuleVersionNumber: DevKit.WebApi.IntegerValue;
/** Choose the local currency for the record to make sure budgets are reported in the correct currency. */
TransactionCurrencyId: DevKit.WebApi.LookupValue;
/** For internal use only. */
TraversedPath: DevKit.WebApi.StringValue;
/** Time zone code that was in use when the record was created. */
UTCConversionTimeZoneCode: DevKit.WebApi.IntegerValue;
/** Version number of the phone call activity. */
VersionNumber: DevKit.WebApi.BigIntValueReadonly;
/** The array of object that can cast object to ActivityPartyApi class */
ActivityParties: Array<any>;
}
}
declare namespace OptionSet {
namespace PhoneCall {
// NOTE(review): members carry their numeric values in comments (e.g. High = 2) but
// declare no initializers here; the actual values are presumably supplied by the
// companion generated script — confirm before relying on declaration order.
enum PriorityCode {
/** 2 */
High,
/** 0 */
Low,
/** 1 */
Normal
}
enum StateCode {
/** 2 */
Canceled,
/** 1 */
Completed,
/** 0 */
Open
}
enum StatusCode {
/** 3 */
Canceled,
/** 2 */
Made,
/** 1 */
Open,
/** 4 */
Received
}
enum RollupState {
/** 0 - Attribute value is yet to be calculated */
NotCalculated,
/** 1 - Attribute value has been calculated per the last update time in <AttributeSchemaName>_Date attribute */
Calculated,
/** 2 - Attribute value calculation lead to overflow error */
OverflowError,
/** 3 - Attribute value calculation failed due to an internal error, next run of calculation job will likely fix it */
OtherError,
/** 4 - Attribute value calculation failed because the maximum number of retry attempts to calculate the value were exceeded likely due to high number of concurrency and locking conflicts */
RetryLimitExceeded,
/** 5 - Attribute value calculation failed because maximum hierarchy depth limit for calculation was reached */
HierarchicalRecursionLimitReached,
/** 6 - Attribute value calculation failed because a recursive loop was detected in the hierarchy of the record */
LoopDetected
}
}
}
//{'JsForm':['Phone Call','Phone Call for Interactive experience','Phone call quick create form.'],'JsWebApi':true,'IsDebugForm':true,'IsDebugWebApi':true,'Version':'2.12.31','JsFormVersion':'v2'}
import type {
DidSignature,
IDidDetails,
IDidResolver,
IIdentity,
IDidKeyDetails,
KeystoreSigner,
SubmittableExtrinsic,
VerificationKeyRelationship,
IDidServiceEndpoint,
} from '@kiltprotocol/types'
import { KeyRelationship } from '@kiltprotocol/types'
import { SDKErrors, Crypto } from '@kiltprotocol/utils'
import { hexToU8a, isHex } from '@polkadot/util'
import type { Registry } from '@polkadot/types/types'
import { checkAddress, encodeAddress } from '@polkadot/util-crypto'
import { DefaultResolver } from './DidResolver/DefaultResolver'
import type {
PublicKeyEnum,
IDidCreationOptions,
IAuthorizeCallOptions,
DidAuthorizedCallOperation,
DidCreationDetails,
DidPublicKey,
INewPublicKey,
PublicKeyRoleAssignment,
IDidParsingResult,
IServiceEndpointChainRecordCodec,
} from './types'
import { generateCreateTx } from './Did.chain'
import { LightDidDetails } from '.'
// URI scheme prefix shared by all KILT DIDs.
export const KILT_DID_PREFIX = 'did:kilt:'
// Matches the following full DIDs
// - did:kilt:<kilt_address>
// - did:kilt:<kilt_address>#<fragment>
// Named groups: 'identifier' (48-char SS58 address starting with '4'), optional 'fragment'.
export const FULL_KILT_DID_REGEX =
/^did:kilt:(?<identifier>4[1-9a-km-zA-HJ-NP-Z]{47})(?<fragment>#[^#\n]+)?$/
// Matches the following light DIDs
// - did:kilt:light:00<kilt_address>
// - did:kilt:light:01<kilt_address>:<encoded_details>
// - did:kilt:light:10<kilt_address>#<fragment>
// - did:kilt:light:99<kilt_address>:<encoded_details>#<fragment>
// Named groups: 'auth_key_type' (two digits), 'identifier', optional 'encoded_details', optional 'fragment'.
// NOTE(review): neither regex defines a 'version' named group.
export const LIGHT_KILT_DID_REGEX =
/^did:kilt:light:(?<auth_key_type>[0-9]{2})(?<identifier>4[1-9a-km-zA-HJ-NP-Z]{47,48})(?<encoded_details>:.+?)?(?<fragment>#[^#\n]+)?$/
// Signature key types accepted by the KILT chain.
export enum CHAIN_SUPPORTED_SIGNATURE_KEY_TYPES {
ed25519 = 'ed25519',
sr25519 = 'sr25519',
secp256k1 = 'secp256k1',
}
// Encryption key types accepted by the KILT chain.
export enum CHAIN_SUPPORTED_ENCRYPTION_KEY_TYPES {
x25519 = 'x25519',
}
// Merged map of every chain-supported key type (companion type alias below).
export const CHAIN_SUPPORTED_KEY_TYPES = {
...CHAIN_SUPPORTED_ENCRYPTION_KEY_TYPES,
...CHAIN_SUPPORTED_SIGNATURE_KEY_TYPES,
}
export type CHAIN_SUPPORTED_KEY_TYPES = typeof CHAIN_SUPPORTED_KEY_TYPES
// Maps a chain signature key type to the algorithm name used by keystore signers.
const SignatureAlgForKeyType = {
[CHAIN_SUPPORTED_SIGNATURE_KEY_TYPES.ed25519]: 'ed25519',
[CHAIN_SUPPORTED_SIGNATURE_KEY_TYPES.sr25519]: 'sr25519',
[CHAIN_SUPPORTED_SIGNATURE_KEY_TYPES.secp256k1]: 'ecdsa-secp256k1',
}
/**
 * Resolves the signature algorithm name for a given key type.
 * @param keyType the key type (e.g. 'ed25519', 'secp256k1')
 * @returns the mapped algorithm name, or the key type itself when unmapped
 */
export function getSignatureAlgForKeyType(keyType: string): string {
  const mapped = SignatureAlgForKeyType[keyType]
  return mapped === undefined ? keyType : mapped
}
// Signing key types that a light DID's authentication key may use.
export enum LIGHT_DID_SUPPORTED_SIGNING_KEY_TYPES {
ed25519 = 'ed25519',
sr25519 = 'sr25519',
}
// Two-character prefix encoding the authentication key type inside a light DID identifier.
const EncodingForSigningKeyType = {
[LIGHT_DID_SUPPORTED_SIGNING_KEY_TYPES.sr25519]: '00',
[LIGHT_DID_SUPPORTED_SIGNING_KEY_TYPES.ed25519]: '01',
}
// Inverse of EncodingForSigningKeyType: prefix -> key type.
const SigningKeyTypeFromEncoding = {
'00': LIGHT_DID_SUPPORTED_SIGNING_KEY_TYPES.sr25519,
'01': LIGHT_DID_SUPPORTED_SIGNING_KEY_TYPES.ed25519,
}
/**
 * Maps a light-DID signing key type to its two-character encoding prefix.
 * @param keyType the signing key type (e.g. 'sr25519', 'ed25519')
 * @returns the encoding ('00' or '01'), or null for unsupported key types
 */
// FIX: the return type was declared `string` although the function returns
// `null` for unsupported key types; declare the nullable result honestly.
export function getEncodingForSigningKeyType(keyType: string): string | null {
  return EncodingForSigningKeyType[keyType] || null
}
/**
 * Maps a light-DID two-character encoding prefix back to its signing key type.
 * @param encoding the two-character prefix ('00' or '01')
 * @returns the signing key type, or null for unknown encodings
 */
// FIX: the return type was declared `string` although the function returns
// `null` for unknown encodings; declare the nullable result honestly.
export function getSigningKeyTypeFromEncoding(encoding: string): string | null {
  return SigningKeyTypeFromEncoding[encoding]?.toString() || null
}
/**
 * Builds a light-DID URI from an identifier.
 * Version 1 is the default and carries no explicit version segment.
 */
function getLightDidFromIdentifier(identifier: string, didVersion = 1): string {
  if (didVersion === 1) {
    return `${KILT_DID_PREFIX}light:${identifier}`
  }
  return `${KILT_DID_PREFIX}light:v${didVersion}:${identifier}`
}
function getFullDidFromIdentifier(identifier: string, didVersion = 1): string {
const versionString = didVersion === 1 ? '' : `v${didVersion}:`
return KILT_DID_PREFIX.concat(`${versionString}${identifier}`)
}
/**
 * Normalises a KILT DID or bare identifier into a KILT DID URI.
 *
 * If `identifier` already starts with the DID prefix it is validated against
 * both DID patterns and returned unchanged; otherwise a DID of the requested
 * type/version is built from it.
 *
 * @param identifier a KILT DID or a bare DID identifier
 * @param didType whether to build a 'full' or 'light' DID
 * @param didVersion DID spec version (defaults to 1)
 * @throws ERROR_INVALID_DID_FORMAT if the input has the DID prefix but
 *   matches neither DID pattern
 * @throws ERROR_UNSUPPORTED_DID for an unknown `didType`
 */
export function getKiltDidFromIdentifier(
  identifier: string,
  didType: 'full' | 'light',
  didVersion = 1
): string {
  if (identifier.startsWith(KILT_DID_PREFIX)) {
    if (
      FULL_KILT_DID_REGEX.exec(identifier) ||
      LIGHT_KILT_DID_REGEX.exec(identifier)
    ) {
      return identifier
    }
    // BUG FIX: previously the error factory itself was thrown instead of the
    // error it produces (compare the invocations in parseDidUrl and
    // validateKiltDid, which call ERROR_INVALID_DID_FORMAT(...)).
    throw SDKErrors.ERROR_INVALID_DID_FORMAT(identifier)
  }
  switch (didType) {
    case 'full':
      return getFullDidFromIdentifier(identifier, didVersion)
    case 'light':
      return getLightDidFromIdentifier(identifier, didVersion)
    default:
      throw SDKErrors.ERROR_UNSUPPORTED_DID(didType)
  }
}
/**
 * Parses a KILT DID URL (full or light) into its components.
 *
 * @param didUrl the DID (optionally with fragment) to parse
 * @returns did, version, type ('full' | 'light'), identifier, optional
 *   fragment and (light DIDs only) optional encodedDetails
 * @throws ERROR_INVALID_DID_FORMAT when neither DID pattern matches
 */
export function parseDidUrl(didUrl: string): IDidParsingResult {
  let matches = FULL_KILT_DID_REGEX.exec(didUrl)?.groups
  if (matches && matches.identifier) {
    // NOTE(review): neither regex defines a `version` capture group, so
    // `matches.version` is always undefined and version is always 1 here —
    // confirm whether versioned DIDs should be supported.
    const version = matches.version ? parseInt(matches.version, 10) : 1
    return {
      did: getKiltDidFromIdentifier(matches.identifier, 'full', version),
      version,
      type: 'full',
      identifier: matches.identifier,
      // substring(1) strips the leading '#'
      fragment: matches.fragment?.substring(1),
    }
  }
  // If it fails to parse full DID, try with light DID
  matches = LIGHT_KILT_DID_REGEX.exec(didUrl)?.groups
  if (matches && matches.identifier && matches.auth_key_type) {
    const version = matches.version ? parseInt(matches.version, 10) : 1
    // Light DID identifiers carry the 2-digit key-type encoding as a prefix.
    const lightDidIdentifier = matches.auth_key_type.concat(matches.identifier)
    return {
      did: getKiltDidFromIdentifier(lightDidIdentifier, 'light', version),
      version,
      type: 'light',
      identifier: matches.auth_key_type.concat(matches.identifier),
      // substring(1) strips the leading '#' from the fragment and the
      // leading ':' from the encoded details.
      fragment: matches.fragment?.substring(1),
      encodedDetails: matches.encoded_details?.substring(1),
    }
  }
  throw SDKErrors.ERROR_INVALID_DID_FORMAT(didUrl)
}
/**
 * Extracts the bare identifier from a KILT DID.
 * @throws ERROR_INVALID_DID_FORMAT for malformed DIDs (via parseDidUrl).
 */
export function getIdentifierFromKiltDid(did: string): string {
  return parseDidUrl(did).identifier
}
/**
 * Type guard validating that `input` is a well-formed KILT DID whose
 * identifier is a valid SS58 address.
 *
 * @param input the candidate DID string
 * @param allowFragment when false (default), DIDs containing a fragment are
 *   rejected
 * @throws TypeError when input is not a string
 * @throws ERROR_INVALID_DID_FORMAT / ERROR_ADDRESS_INVALID /
 *   ERROR_UNSUPPORTED_DID on validation failure (this guard never returns
 *   false — it either returns true or throws)
 */
export function validateKiltDid(
  input: unknown,
  allowFragment = false
): input is IDidDetails['did'] {
  if (typeof input !== 'string') {
    throw TypeError(`DID string expected, got ${typeof input}`)
  }
  const { identifier, type, fragment } = parseDidUrl(input)
  if (!allowFragment && fragment) {
    throw SDKErrors.ERROR_INVALID_DID_FORMAT(input)
  }
  switch (type) {
    case 'full':
      // 38: SS58 network prefix used throughout this module — presumably the
      // KILT chain prefix; confirm against chain configuration.
      if (!checkAddress(identifier, 38)[0]) {
        throw SDKErrors.ERROR_ADDRESS_INVALID(identifier, 'DID identifier')
      }
      break
    case 'light':
      // Identifier includes the first two characters for the key type encoding
      if (!checkAddress(identifier.substring(2), 38)[0]) {
        throw SDKErrors.ERROR_ADDRESS_INVALID(identifier, 'DID identifier')
      }
      break
    default:
      throw SDKErrors.ERROR_UNSUPPORTED_DID(input)
  }
  return true
}
/**
 * Type guard for DidSignature objects: requires a hex `signature` and a
 * valid KILT DID (fragment allowed) as `keyId`.
 * @throws ERROR_SIGNATURE_DATA_TYPE whenever the input does not qualify.
 */
export function validateDidSignature(input: unknown): input is DidSignature {
  let wellFormed = false
  try {
    const { signature, keyId } = input as DidSignature
    // validateKiltDid throws (rather than returning false) on bad DIDs; any
    // such throw is normalised into ERROR_SIGNATURE_DATA_TYPE below.
    wellFormed = isHex(signature) && validateKiltDid(keyId, true)
  } catch (e) {
    wellFormed = false
  }
  if (!wellFormed) {
    throw SDKErrors.ERROR_SIGNATURE_DATA_TYPE()
  }
  return true
}
/**
 * Wraps a public key in the enum-like object shape expected by the chain
 * codecs, keyed by key type — e.g. { ed25519: <bytes> }.
 */
export function formatPublicKey(keypair: INewPublicKey): PublicKeyEnum {
  return { [keypair.type]: keypair.publicKey }
}
/**
 * Type guard for INewPublicKey: a non-null object with a Uint8Array
 * `publicKey` and a string `type`.
 *
 * BUG FIX: `typeof null === 'object'`, so the previous version let null
 * through the guard and then threw a TypeError while destructuring it;
 * it now correctly returns false.
 */
export function isINewPublicKey(key: unknown): key is INewPublicKey {
  if (typeof key === 'object' && key !== null) {
    const { publicKey, type } = key as INewPublicKey
    return publicKey instanceof Uint8Array && typeof type === 'string'
  }
  return false
}
/**
 * Encodes the chain codec (DidDidDetailsDidCreationDetails) for a DID
 * creation operation from the given keys and service endpoints.
 *
 * The authentication key is not part of this struct; only key agreement,
 * attestation (assertionMethod) and delegation keys are encoded here.
 */
export function encodeDidCreationOperation(
  registry: Registry,
  { didIdentifier, submitter, keys = {}, endpoints = [] }: IDidCreationOptions
): DidCreationDetails {
  // Pick the optional keys by their verification relationship.
  const {
    [KeyRelationship.assertionMethod]: assertionMethodKey,
    [KeyRelationship.capabilityDelegation]: delegationKey,
    [KeyRelationship.keyAgreement]: encryptionKey,
  } = keys
  // build did create object
  const didCreateRaw = {
    did: didIdentifier,
    submitter,
    newKeyAgreementKeys: encryptionKey ? [formatPublicKey(encryptionKey)] : [],
    newAttestationKey: assertionMethodKey
      ? formatPublicKey(assertionMethodKey)
      : undefined,
    newDelegationKey: delegationKey
      ? formatPublicKey(delegationKey)
      : undefined,
    newServiceDetails: endpoints.map((service) => {
      const { id, urls } = service
      return { id, urls, serviceTypes: service.types }
    }),
  }
  return new (registry.getOrThrow<DidCreationDetails>(
    'DidDidDetailsDidCreationDetails'
  ))(registry, didCreateRaw)
}
/**
 * Encodes a DID-authorized call wrapper for submission to the chain.
 */
export function encodeDidAuthorizedCallOperation(
  registry: Registry,
  {
    didIdentifier,
    txCounter,
    call,
    submitter,
    blockNumber,
  }: IAuthorizeCallOptions
): DidAuthorizedCallOperation {
  const OperationCodec = registry.getOrThrow<DidAuthorizedCallOperation>(
    'DidAuthorizedCallOperation'
  )
  return new OperationCodec(registry, {
    did: didIdentifier,
    txCounter,
    call,
    blockNumber,
    submitter,
  })
}
/**
 * Encodes a DID service endpoint into its on-chain codec representation.
 */
export function encodeServiceEndpoint(
  registry: Registry,
  endpoint: IDidServiceEndpoint
): IServiceEndpointChainRecordCodec {
  const EndpointCodec = registry.getOrThrow<IServiceEndpointChainRecordCodec>(
    'DidServiceEndpointsDidEndpoint'
  )
  const { id, types, urls } = endpoint
  return new EndpointCodec(registry, { id, serviceTypes: types, urls })
}
/**
 * Wraps a public key in the DidPublicKey codec, classifying it as either a
 * verification key or an encryption key based on its type.
 * @throws TypeError for key types the chain does not support.
 */
export function encodeDidPublicKey(
  registry: Registry,
  key: INewPublicKey
): DidPublicKey {
  const signatureTypes = Object.values(
    CHAIN_SUPPORTED_SIGNATURE_KEY_TYPES
  ) as string[]
  const encryptionTypes = Object.values(
    CHAIN_SUPPORTED_ENCRYPTION_KEY_TYPES
  ) as string[]
  let keyClass: string
  if (signatureTypes.includes(key.type)) {
    keyClass = 'PublicVerificationKey'
  } else if (encryptionTypes.includes(key.type)) {
    keyClass = 'PublicEncryptionKey'
  } else {
    // Runtime error message kept identical to the original.
    throw TypeError(
      `Unsupported key type; types currently recognized are ${Object.values(
        CHAIN_SUPPORTED_KEY_TYPES
      )}`
    )
  }
  return new (registry.getOrThrow<DidPublicKey>('DidPublicKey'))(registry, {
    [keyClass]: formatPublicKey(key),
  })
}
/**
 * Derives a key id by hashing the SCALE-encoded public key codec.
 */
export function computeKeyId(publicKey: DidPublicKey): string {
  return Crypto.hashStr(publicKey.toU8a())
}
// Result of a DID signature verification; didDetails/key are only present
// when resolution got far enough to produce them.
export type VerificationResult = {
  verified: boolean
  didDetails?: IDidDetails
  key?: IDidKeyDetails
}
/**
 * Verifies a signature against a key resolved from the given DID details.
 * Verification fails (verified: false) when the key cannot be found, is not
 * controlled by the DID, or has a type with no known signature algorithm.
 */
function verifyDidSignatureFromDetails({
  message,
  signature,
  keyId,
  keyRelationship,
  didDetails,
}: {
  message: string | Uint8Array
  signature: string | Uint8Array
  keyId: string
  didDetails: IDidDetails
  keyRelationship?: VerificationKeyRelationship
}): VerificationResult {
  // When a relationship is given, only keys serving that role are eligible.
  let key
  if (keyRelationship) {
    key = didDetails?.getKeys(keyRelationship).find((k) => k.id === keyId)
  } else {
    key = didDetails?.getKey(keyId)
  }
  if (
    !key ||
    key.controller !== didDetails.did ||
    !SignatureAlgForKeyType[key.type]
  ) {
    // key (possibly undefined) is echoed back for diagnostics.
    return { verified: false, didDetails, key }
  }
  return {
    verified: Crypto.verify(message, signature, key.publicKeyHex),
    didDetails,
    key,
  }
}
// Verify a DID signature given the key ID of the signature.
// A signature verification returns false if a migrated and then deleted DID is used.
/**
 * Resolves the DID owning `keyId` and verifies the signature with that key.
 *
 * Returns { verified: false } when the DID cannot be resolved, has been
 * deactivated, or is a light DID that was migrated to a full DID.
 */
export async function verifyDidSignature({
  message,
  signature,
  keyId,
  keyRelationship,
  resolver = DefaultResolver,
}: {
  message: string | Uint8Array
  signature: string | Uint8Array
  keyId: IDidKeyDetails['id']
  resolver?: IDidResolver
  keyRelationship?: VerificationKeyRelationship
}): Promise<VerificationResult> {
  // resolveDoc can accept a key ID, but it will always return the DID details.
  const resolutionDetails = await resolver.resolveDoc(keyId)
  // Verification fails if the DID does not exist at all.
  if (!resolutionDetails) {
    return {
      verified: false,
    }
  }
  // Verification also fails if the DID has been deleted.
  if (resolutionDetails.metadata.deactivated) {
    return {
      verified: false,
    }
  }
  // Verification also fails if the signer is a migrated light DID.
  if (resolutionDetails.metadata.canonicalId) {
    return {
      verified: false,
    }
  }
  // canonicalId is known to be unset here (checked above), so the resolved
  // details can be used directly. DEAD-CODE FIX: a previous version
  // re-checked canonicalId at this point and re-resolved the canonical DID,
  // but that branch was unreachable after the early return above.
  const didDetails = resolutionDetails.details as IDidDetails
  return verifyDidSignatureFromDetails({
    message,
    signature,
    keyId,
    keyRelationship,
    didDetails,
  })
}
/**
 * Builds a signed DID-creation extrinsic from the given public keys.
 *
 * An authentication key is required; its SS58 address (prefix 38) becomes
 * the new DID identifier.
 *
 * @returns the submittable extrinsic and the resulting full DID URI.
 * @throws Error when the authentication key is missing.
 */
export async function writeDidFromPublicKeys(
  signer: KeystoreSigner,
  submitter: IIdentity['address'],
  publicKeys: PublicKeyRoleAssignment
): Promise<{ extrinsic: SubmittableExtrinsic; did: string }> {
  const { [KeyRelationship.authentication]: authenticationKey } = publicKeys
  if (!authenticationKey)
    throw Error(`${KeyRelationship.authentication} key is required`)
  // The DID identifier is derived from the authentication public key.
  const didIdentifier = encodeAddress(authenticationKey.publicKey, 38)
  const extrinsic = await generateCreateTx({
    signer,
    submitter,
    didIdentifier,
    keys: publicKeys,
    alg: getSignatureAlgForKeyType(authenticationKey.type),
    signingPublicKey: authenticationKey.publicKey,
  })
  const did = getKiltDidFromIdentifier(didIdentifier, 'full')
  return { extrinsic, did }
}
/**
 * Same as writeDidFromPublicKeys, but additionally registers the given
 * service endpoints in the creation extrinsic.
 */
export async function writeDidFromPublicKeysAndServices(
  signer: KeystoreSigner,
  submitter: IIdentity['address'],
  publicKeys: PublicKeyRoleAssignment,
  endpoints: IDidServiceEndpoint[]
): Promise<{ extrinsic: SubmittableExtrinsic; did: string }> {
  const { [KeyRelationship.authentication]: authenticationKey } = publicKeys
  if (!authenticationKey)
    throw Error(`${KeyRelationship.authentication} key is required`)
  const didIdentifier = encodeAddress(authenticationKey.publicKey, 38)
  const extrinsic = await generateCreateTx({
    signer,
    submitter,
    didIdentifier,
    keys: publicKeys,
    alg: getSignatureAlgForKeyType(authenticationKey.type),
    signingPublicKey: authenticationKey.publicKey,
    endpoints,
  })
  const did = getKiltDidFromIdentifier(didIdentifier, 'full')
  return { extrinsic, did }
}
/**
 * Convenience wrapper: builds a DID-creation extrinsic from an Identity,
 * signing with its keyring pair and registering its box key pair as the
 * x25519 key agreement key.
 */
export function writeDidFromIdentity(
  identity: IIdentity,
  submitter: IIdentity['address']
): Promise<{ extrinsic: SubmittableExtrinsic; did: string }> {
  const { signKeyringPair } = identity
  // Ad-hoc keystore signer backed directly by the identity's keyring pair.
  const signer: KeystoreSigner = {
    sign: ({ data }) =>
      Promise.resolve({
        data: signKeyringPair.sign(data),
        alg: getSignatureAlgForKeyType(signKeyringPair.type) as any,
      }),
  }
  return writeDidFromPublicKeys(signer, submitter, {
    [KeyRelationship.authentication]: signKeyringPair,
    [KeyRelationship.keyAgreement]: { ...identity.boxKeyPair, type: 'x25519' },
  })
}
/**
 * Signs `toSign` with the given DID key via the keystore signer.
 * @returns the signing key id, the raw signature bytes and the algorithm.
 */
export async function signWithKey(
  toSign: Uint8Array | string,
  key: IDidKeyDetails,
  signer: KeystoreSigner
): Promise<{ keyId: string; alg: string; signature: Uint8Array }> {
  const alg = getSignatureAlgForKeyType(key.type)
  const signingResult = await signer.sign({
    publicKey: Crypto.coToUInt8(key.publicKeyHex),
    alg,
    data: Crypto.coToUInt8(toSign),
  })
  return { keyId: key.id, signature: signingResult.data, alg }
}
/**
 * Signs `toSign` with one of the DID's keys, selected either by key
 * relationship (e.g. authentication, assertionMethod) or by explicit key id.
 *
 * @throws Error when no matching key exists on the DID.
 */
export async function signWithDid(
  toSign: Uint8Array | string,
  did: IDidDetails,
  signer: KeystoreSigner,
  whichKey: KeyRelationship | IDidKeyDetails['id']
): Promise<{ keyId: string; alg: string; signature: Uint8Array }> {
  let key: IDidKeyDetails | undefined
  if (Object.values(KeyRelationship).includes(whichKey as KeyRelationship)) {
    // BUG FIX: honour the requested relationship — previously the
    // authentication key was always used regardless of `whichKey`.
    // eslint-disable-next-line prefer-destructuring
    key = did.getKeys(whichKey as KeyRelationship)[0]
  } else {
    key = did.getKey(whichKey)
  }
  if (!key) {
    throw Error(
      `failed to find key on FullDidDetails (${did.did}): ${whichKey}`
    )
  }
  return signWithKey(toSign, key, signer)
}
/**
 * Produces a DidSignature (hex-encoded signature plus signing key id) over
 * `toSign`, using the DID's authentication key.
 */
export async function getDidAuthenticationSignature(
  toSign: Uint8Array | string,
  did: IDidDetails,
  signer: KeystoreSigner
): Promise<DidSignature> {
  const result = await signWithDid(
    toSign,
    did,
    signer,
    KeyRelationship.authentication
  )
  return { keyId: result.keyId, signature: Crypto.u8aToHex(result.signature) }
}
/**
 * Joins a DID URI and a fragment id into a DID fragment URI (uri#fragment).
 */
export function assembleDidFragment(
  didUri: IDidDetails['did'],
  fragmentId: string
): string {
  return [didUri, fragmentId].join('#')
}
// This function is tested in the DID integration tests, in the `DID migration` test case.
/**
 * Migrates a light DID to a full (on-chain) DID, carrying over its
 * authentication key, optional key agreement key and service endpoints.
 *
 * @returns the DID-creation extrinsic and the resulting full DID URI.
 */
export async function upgradeDid(
  lightDid: LightDidDetails,
  submitter: IIdentity['address'],
  signer: KeystoreSigner
): Promise<{ extrinsic: SubmittableExtrinsic; did: string }> {
  const didAuthenticationKey = lightDid.getKeys(
    KeyRelationship.authentication
  )[0]
  const didEncryptionKey = lightDid.getKeys(KeyRelationship.keyAgreement)[0]
  const newDidPublicKeys: PublicKeyRoleAssignment = {
    authentication: {
      publicKey: hexToU8a(didAuthenticationKey.publicKeyHex),
      type: didAuthenticationKey.type,
    },
  }
  // The encryption key is optional on light DIDs; only migrate it if present.
  if (didEncryptionKey) {
    newDidPublicKeys.keyAgreement = {
      publicKey: hexToU8a(didEncryptionKey.publicKeyHex),
      type: didEncryptionKey.type,
    }
  }
  const adjustedServiceEndpoints = lightDid.getEndpoints().map((service) => {
    // We are sure a fragment exists.
    const id = parseDidUrl(service.id).fragment as string
    // We remove the service ID prefix (did:light:...) before writing it on chain.
    return { ...service, id }
  })
  return writeDidFromPublicKeysAndServices(
    signer,
    submitter,
    newDidPublicKeys,
    adjustedServiceEndpoints
  )
}
import * as cp from "child_process";
import * as crypto from "crypto";
import * as fs from "fs-plus";
import * as path from "path";
import * as vscode from "vscode";
import * as sdk from "vscode-iot-device-cube-sdk";
import * as WinReg from "winreg";
import { IoTCubeCommands, RemoteContainersCommands, VscodeCommands, WorkbenchCommands } from "./common/Commands";
import { ArgumentEmptyOrNullError } from "./common/Error/OperationFailedErrors/ArgumentEmptyOrNullError";
import { OperationCanceledError } from "./common/Error/OperationCanceledError";
import { OperationFailedError } from "./common/Error/OperationFailedErrors/OperationFailedError";
import {
AzureFunctionsLanguage,
ConfigKey,
FileNames,
OperationType,
PlatformType,
ScaffoldType,
TemplateTag
} from "./constants";
import { DialogResponses } from "./DialogResponses";
import { FileUtility } from "./FileUtility";
import { ProjectHostType } from "./Models/Interfaces/ProjectHostType";
import { ProjectTemplate, TemplateFileInfo } from "./Models/Interfaces/ProjectTemplate";
import { Platform } from "./Models/Interfaces/ProjectTemplate";
import { IoTWorkbenchProjectBase } from "./Models/IoTWorkbenchProjectBase";
import { IoTWorkspaceProject } from "./Models/IoTWorkspaceProject";
import { RemoteExtension } from "./Models/RemoteExtension";
import { ProjectEnvironmentConfiger } from "./ProjectEnvironmentConfiger";
import { TelemetryContext, TelemetryResult } from "./telemetry";
import { WorkbenchExtension } from "./WorkbenchExtension";
import { WorkspaceNotOpenError } from "./common/Error/OperationFailedErrors/WorkspaceNotOpenError";
import { SystemResourceNotFoundError } from "./common/Error/SystemErrors/SystemResourceNotFoundError";
import { FileNotFoundError } from "./common/Error/OperationFailedErrors/FileNotFound";
import { DirectoryNotFoundError } from "./common/Error/OperationFailedErrors/DirectoryNotFoundError";
import { ConfigHandler } from "./configHandler";
const importLazy = require("import-lazy");
const ioTWorkspaceProjectModule = importLazy(() => require("./Models/IoTWorkspaceProject"))();
const ioTContainerizedProjectModule = importLazy(() => require("./Models/IoTContainerizedProject"))();
const raspberryPiDeviceModule = importLazy(() => require("./Models/RaspberryPiDevice"))();
/**
 * Resolves after approximately `ms` milliseconds.
 */
export function delay(ms: number): Promise<unknown> {
  return new Promise<void>(resolve => {
    setTimeout(() => resolve(), ms);
  });
}
/**
 * Reveals the output channel and appends `message` without a newline.
 */
export function channelShowAndAppend(channel: vscode.OutputChannel, message: string): void {
  channel.show();
  channel.append(message);
}
/**
 * Reveals the output channel and appends `message` followed by a newline.
 */
export function channelShowAndAppendLine(channel: vscode.OutputChannel, message: string): void {
  channel.show();
  channel.appendLine(message);
}
/**
 * Reads a Windows registry value (Windows-only, backed by winreg).
 *
 * Resolves with the value's content, or "" when the value does not exist.
 * Rejects on any registry access error.
 *
 * @param hive registry hive (e.g. HKLM)
 * @param key registry key path
 * @param name value name under the key
 */
export function getRegistryValues(hive: string, key: string, name: string): Promise<string> {
  return new Promise((resolve: (value: string) => void, reject: (value: Error) => void) => {
    try {
      const regKey = new WinReg({ hive, key });
      regKey.valueExists(name, (e, exists) => {
        if (e) {
          return reject(e);
        }
        if (exists) {
          regKey.get(name, (err, result) => {
            if (!err) {
              // A null result is normalised to the empty string.
              return resolve(result ? result.value : "");
            } else {
              return reject(err);
            }
          });
        } else {
          return resolve("");
        }
      });
    } catch (ex) {
      // Synchronous failures (e.g. constructor errors) also reject.
      return reject(ex);
    }
  });
}
/**
 * Synchronously checks whether `dirPath` exists and is a directory.
 * Any stat failure (missing path, permission error) yields false.
 */
export function directoryExistsSync(dirPath: string): boolean {
  let stats;
  try {
    stats = fs.statSync(dirPath);
  } catch {
    return false;
  }
  return stats.isDirectory();
}
/**
 * Creates `dirPath` and any missing parent directories (like mkdir -p).
 * No-op when the directory already exists.
 */
export function mkdirRecursivelySync(dirPath: string): void {
  if (directoryExistsSync(dirPath)) {
    return;
  }
  const parent = path.dirname(dirPath);
  // Recurse only while there is a distinct, not-yet-existing parent;
  // path.normalize(parent) === path.normalize(dirPath) marks the filesystem root.
  if (path.normalize(parent) !== path.normalize(dirPath) && !directoryExistsSync(parent)) {
    mkdirRecursivelySync(parent);
  }
  fs.mkdirSync(dirPath);
}
/**
 * Synchronously checks whether `filePath` exists and is a regular file.
 * Any stat failure (missing path, permission error) yields false.
 */
export function fileExistsSync(filePath: string): boolean {
  let stats;
  try {
    stats = fs.statSync(filePath);
  } catch {
    return false;
  }
  return stats.isFile();
}
// Script template name for each supported Azure Functions language.
const scriptTemplateNames = new Map<string, string>([
  [AzureFunctionsLanguage.CSharpScript, "IoTHubTrigger-CSharp"],
  [AzureFunctionsLanguage.JavaScript, "IoTHubTrigger-JavaScript"],
  [AzureFunctionsLanguage.CSharpLibrary, "Azure.Function.CSharp.IotHubTrigger.2.x"]
]);
/**
 * Returns the Azure Functions script template name for the given language,
 * or undefined for unsupported languages.
 */
export function getScriptTemplateNameFromLanguage(language: string): string | undefined {
  return scriptTemplateNames.get(language);
}
/**
 * Provides additional options for QuickPickItems used in Azure Extensions
 * @template T type of the payload carried alongside the pick item
 */
export interface FolderQuickPickItem<T = undefined> extends vscode.QuickPickItem {
  data: T;
}
/**
 * Get project device root path.
 * For iot workspace project, it is the "Device" folder.
 * For iot containerized project, it is the root path.
 * @returns the device root path, or "" when no workspace folder is open.
 */
export function getProjectDeviceRootPath(): string {
  if (
    !vscode.workspace.workspaceFolders ||
    vscode.workspace.workspaceFolders.length === 0 ||
    !vscode.workspace.workspaceFolders[0].uri.fsPath
  ) {
    return "";
  }
  // Try to get the "Device" folder
  const devicePath = ConfigHandler.get<string>(ConfigKey.devicePath);
  if (devicePath) {
    // Match the configured device folder name against open workspace folders.
    const deviceFolder = vscode.workspace.workspaceFolders.find(
      folder => path.basename(folder.uri.fsPath) === devicePath
    );
    if (deviceFolder) {
      return deviceFolder.uri.fsPath;
    }
  }
  // If no "Device" folder found in current workspace, return first workspace folder directly
  return vscode.workspace.workspaceFolders[0].uri.fsPath;
}
/**
 * Shows the VS Code open dialog.
 * @throws OperationCanceledError when the user dismisses the dialog.
 */
export async function showOpenDialog(options: vscode.OpenDialogOptions): Promise<vscode.Uri[]> {
  const selection = await vscode.window.showOpenDialog(options);
  if (selection) {
    return selection;
  }
  throw new OperationCanceledError("User cancelled the dialog");
}
/**
 * Ask the user to pick one of the open workspace folders (optionally mapped
 * through getSubPath) or to browse the filesystem via an open dialog.
 * @throws OperationCanceledError when the quick pick is dismissed.
 */
export async function selectWorkspaceItem(
  placeHolder: string,
  options: vscode.OpenDialogOptions,
  getSubPath?: (f: vscode.WorkspaceFolder) => string | undefined
): Promise<string> {
  let folderPicks: Array<FolderQuickPickItem<string | undefined>> = [];
  if (vscode.workspace.workspaceFolders) {
    folderPicks = vscode.workspace.workspaceFolders.map((f: vscode.WorkspaceFolder) => {
      let subpath: string | undefined;
      if (getSubPath) {
        subpath = getSubPath(f);
      }
      const fsPath: string = subpath ? path.join(f.uri.fsPath, subpath) : f.uri.fsPath;
      return {
        label: path.basename(fsPath),
        description: fsPath,
        data: fsPath
      };
    });
  }
  // The "Browse..." entry (data === undefined) falls back to the open dialog.
  folderPicks.push({ label: "Browse...", description: "", data: undefined });
  const folder = await vscode.window.showQuickPick(folderPicks, {
    placeHolder,
    ignoreFocusOut: true
  });
  if (!folder) {
    throw new OperationCanceledError("User cancelled the workspace item selection process");
  }
  return folder && folder.data ? folder.data : (await showOpenDialog(options))[0].fsPath;
}
/**
 * Folder-picking variant of selectWorkspaceItem, defaulting the browse
 * dialog to the first open workspace folder.
 */
export async function selectWorkspaceFolder(
  placeHolder: string,
  getSubPath?: (f: vscode.WorkspaceFolder) => string | undefined
): Promise<string> {
  return await selectWorkspaceItem(
    placeHolder,
    {
      canSelectFiles: false,
      canSelectFolders: true,
      canSelectMany: false,
      defaultUri:
        vscode.workspace.workspaceFolders && vscode.workspace.workspaceFolders.length > 0
          ? vscode.workspace.workspaceFolders[0].uri
          : undefined,
      openLabel: "Select"
    },
    getSubPath
  );
}
/**
 * Runs a shell command and resolves with its stdout.
 *
 * Rejects with the spawn error on failure, or with the stderr text whenever
 * the command wrote to stderr — note this rejects even on exit code 0 if a
 * tool emits warnings on stderr (existing behaviour, kept).
 */
export function executeCommand(command: string): Promise<string> {
  return new Promise((resolve, reject) => {
    cp.exec(command, (error, stdout, stderr) => {
      if (error) {
        reject(error);
        return; // FIX: don't fall through and also call resolve
      }
      if (stderr) {
        reject(stderr);
        return;
      }
      resolve(stdout);
    });
  });
}
/**
 * Spawns a command in `workingDir` (through a shell), streaming its output
 * to the console and the given output channel.
 *
 * Resolves with { status, stdout, stderr } on exit code 0, rejects with the
 * same shape otherwise (or with { error, stdout, stderr } on spawn error).
 *
 * FIX: stdout/stderr are now actually accumulated — previously they were
 * declared as empty const strings and always returned empty.
 */
export function runCommand(
  command: string,
  args: string[],
  workingDir: string,
  outputChannel: vscode.OutputChannel
): Thenable<object> {
  return new Promise((resolve, reject) => {
    let stdout = "";
    let stderr = "";
    const process = cp.spawn(command, args, { cwd: workingDir, shell: true });
    process.stdout.on("data", (data: string) => {
      stdout += data.toString();
      console.log(data);
      outputChannel.appendLine(data);
    });
    process.stderr.on("data", (data: string) => {
      stderr += data.toString();
      console.log(data);
      outputChannel.appendLine(data);
    });
    process.on("error", error => reject({ error, stderr, stdout }));
    process.on("close", status => {
      if (status === 0) {
        resolve({ status, stdout, stderr });
      } else {
        reject({ status, stdout, stderr });
      }
    });
  });
}
/**
 * Pop out information window suggesting user to configure project environment
 * first.
 *
 * If the user accepts, runs the environment configuration for the given
 * platform and notifies on completion; otherwise throws
 * OperationCanceledError to abort the calling operation.
 */
export async function askToConfigureEnvironment(
  context: vscode.ExtensionContext,
  channel: vscode.OutputChannel,
  telemetryContext: TelemetryContext,
  platform: PlatformType,
  deviceRootPath: string,
  scaffoldType: ScaffoldType,
  operation: OperationType
): Promise<void> {
  telemetryContext.properties.result = TelemetryResult.Failed;
  channelShowAndAppendLine(channel, `${operation} operation failed because the project environment needs configuring.`);
  const message = `${operation} operation failed because the project environment needs configuring. \
Do you want to configure project environment first?`;
  const result: vscode.MessageItem | undefined = await vscode.window.showInformationMessage(
    message,
    DialogResponses.yes,
    DialogResponses.no
  );
  if (result === DialogResponses.yes) {
    telemetryContext.properties.errorMessage = `${operation} operation failed and user configures project environment.`;
    await ProjectEnvironmentConfiger.configureProjectEnvironmentAsPlatform(
      context,
      channel,
      telemetryContext,
      platform,
      deviceRootPath,
      scaffoldType
    );
    // FIX: corrected user-facing typo "environmnet" -> "environment".
    const message = `Configuration of project environment done. \
You can run the ${operation.toLocaleLowerCase()} operation now.`;
    channelShowAndAppendLine(channel, message);
    vscode.window.showInformationMessage(message);
  } else {
    const message = `${operation} operation failed and user cancels to configure project environment.`;
    throw new OperationCanceledError(message);
  }
}
/**
 * Ask user to open current IoT project folder as workspace.
 * @param rootPath project root path
 * @param workspaceFile iot workspace config file
 * @param telemetryContext telemetry context
 * @throws OperationCanceledError when the user declines
 */
export async function askAndOpenProject(
  rootPath: string,
  workspaceFile: string,
  telemetryContext: TelemetryContext
): Promise<void> {
  telemetryContext.properties.result = TelemetryResult.Failed;
  const message = `Operation failed because the IoT project is not opened. \
Current folder contains an IoT project '${workspaceFile}', do you want to open it?`;
  const result: vscode.MessageItem | undefined = await vscode.window.showInformationMessage(
    message,
    DialogResponses.yes,
    DialogResponses.no
  );
  if (result === DialogResponses.yes) {
    telemetryContext.properties.errorMessage = "Operation failed and user opens project folder as workspace.";
    const workspaceFilePath = path.join(rootPath, workspaceFile);
    await vscode.commands.executeCommand(IoTCubeCommands.OpenLocally, workspaceFilePath, false);
  } else {
    throw new OperationCanceledError(`Operation failed and user cancels to open current folder as workspace.`);
  }
}
/**
 * Ask user to open project in remote before operation execution.
 * @param operation compile or upload device code operation
 * @param telemetryContext telemetry context
 * @throws OperationCanceledError when the user declines
 */
export async function askAndOpenInRemote(operation: OperationType, telemetryContext: TelemetryContext): Promise<void> {
  telemetryContext.properties.result = TelemetryResult.Failed;
  const message = `${operation} can only be executed in remote container. \
Do you want to reopen the IoT project in container?`;
  const result: vscode.MessageItem | undefined = await vscode.window.showInformationMessage(
    message,
    DialogResponses.yes,
    DialogResponses.no
  );
  if (result === DialogResponses.yes) {
    telemetryContext.properties.errorMessage = `${operation} operation failed and user reopens project in container.`;
    // Ensure the Remote Containers extension is available before reopening.
    await RemoteExtension.checkRemoteExtension("open project in container");
    await vscode.commands.executeCommand(RemoteContainersCommands.ReopenInContainer);
  } else {
    throw new OperationCanceledError(`${operation} operation failed and user cancels to reopen project in container.`);
  }
}
// Survey asking which device kits users want the extension to support.
const noDeviceSurveyUrl = "https://www.surveymonkey.com/r/C7NY7KJ";
/**
 * Offers the no-device survey; if accepted, opens it in the browser with the
 * OS platform and extension version appended as query parameters.
 */
export async function takeNoDeviceSurvey(
  telemetryContext: TelemetryContext,
  context: vscode.ExtensionContext
): Promise<void> {
  const message =
    "Could you help to take a quick survey about what IoT development kit(s) you want Azure IoT Device Workbench to support?";
  const result: vscode.MessageItem | undefined = await vscode.window.showWarningMessage(
    message,
    DialogResponses.yes,
    DialogResponses.cancel
  );
  if (result === DialogResponses.yes) {
    // Open the survey page
    telemetryContext.properties.message = "User takes no-device survey.";
    telemetryContext.properties.result = TelemetryResult.Succeeded;
    const extension = WorkbenchExtension.getExtension(context);
    if (!extension) {
      return;
    }
    const extensionVersion = extension.packageJSON.version || "unknown";
    await vscode.commands.executeCommand(
      VscodeCommands.VscodeOpen,
      vscode.Uri.parse(
        `${noDeviceSurveyUrl}?o=${encodeURIComponent(process.platform)}&v=${encodeURIComponent(extensionVersion)}`
      )
    );
  }
  return;
}
/**
 * Reads a template folder's file list (FileNames.templateFiles JSON) and
 * loads every listed file's content.
 *
 * @throws FileNotFoundError when the template file list does not exist.
 */
export async function getTemplateFilesInfo(templateFolder: string): Promise<TemplateFileInfo[]> {
  const templateFilesInfo: TemplateFileInfo[] = [];
  const templateFiles = path.join(templateFolder, FileNames.templateFiles);
  if (!(await FileUtility.fileExists(ScaffoldType.Local, templateFiles))) {
    throw new FileNotFoundError("get template files info", `template files ${templateFiles}`, "");
  }
  // NOTE(review): existence is checked through FileUtility (scaffold-aware)
  // but the reads below use fs.readFileSync directly — presumably local-only
  // usage; confirm.
  const templateFilesJson = JSON.parse(fs.readFileSync(templateFiles, "utf8"));
  templateFilesJson.templateFiles.forEach((fileInfo: TemplateFileInfo) => {
    const filePath = path.join(templateFolder, fileInfo.sourcePath, fileInfo.fileName);
    const fileContent = fs.readFileSync(filePath, "utf8");
    templateFilesInfo.push({
      fileName: fileInfo.fileName,
      sourcePath: fileInfo.sourcePath,
      targetPath: fileInfo.targetPath,
      overwrite: typeof fileInfo.overwrite !== "undefined" ? fileInfo.overwrite : true, // if it is not defined, we will overwrite the existing file.
      fileContent
    });
  });
  return templateFilesInfo;
}
/**
 * Writes a template file under `root`, creating the target directory as
 * needed. An existing file is kept unless fileInfo.overwrite is set.
 */
export async function generateTemplateFile(
  root: string,
  type: ScaffoldType,
  fileInfo: TemplateFileInfo
): Promise<void> {
  const targetFolderPath = path.join(root, fileInfo.targetPath);
  if (!(await FileUtility.directoryExists(type, targetFolderPath))) {
    await FileUtility.mkdirRecursively(type, targetFolderPath);
  }
  const targetFilePath = path.join(targetFolderPath, fileInfo.fileName);
  // Files without content are skipped entirely.
  if (fileInfo.fileContent) {
    const fileExist = await FileUtility.fileExists(type, targetFilePath);
    if (fileInfo.overwrite || !fileExist) {
      await FileUtility.writeFile(type, targetFilePath, fileInfo.fileContent);
    }
  }
  return;
}
/**
 * Pretty-prints a JSON-serialisable value to the output channel
 * (4-space indentation).
 */
export function channelPrintJsonObject(
  channel: vscode.OutputChannel,
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  data: any
): void {
  const indentWidth = 4;
  channelShowAndAppendLine(channel, JSON.stringify(data, null, indentWidth));
}
/**
 * If external project, ask whether to configure the project to be IoT Container
 * Project or create an IoT Project
 * @throws OperationCanceledError when the user dismisses the prompt
 */
export async function handleExternalProject(telemetryContext: TelemetryContext): Promise<void> {
  telemetryContext.properties.result = TelemetryResult.Failed;
  const message =
    "An IoT project is needed to process the operation, do you want to configure current project to be an IoT Embedded Linux Project or create an IoT project?";
  // Local holder for the two dialog choices offered below.
  class Choice {
    static configureAsContainerProject: vscode.MessageItem = {
      title: "Configure as Embedded Linux Project"
    };
    static createNewProject: vscode.MessageItem = { title: "Create IoT Project" };
  }
  const result: vscode.MessageItem | undefined = await vscode.window.showInformationMessage(
    message,
    Choice.configureAsContainerProject,
    Choice.createNewProject
  );
  if (result === Choice.configureAsContainerProject) {
    telemetryContext.properties.errorMessage =
      "Operation failed and user configures external project to be an IoT Embedded Linux Project";
    await vscode.commands.executeCommand(WorkbenchCommands.ConfigureProjectEnvironment);
  } else if (result === Choice.createNewProject) {
    telemetryContext.properties.errorMessage = "Operation failed and user creates new project";
    await vscode.commands.executeCommand(WorkbenchCommands.InitializeProject);
  } else {
    throw new OperationCanceledError(`Operation failed and user cancels to configure external project.`);
  }
}
/**
 * Get project configs from iot workbench project file
 * @param type Scaffold type
 * @param iotWorkbenchProjectFilePath path of the project config file
 * @returns the parsed config object; {} when the file is missing or empty
 */
export async function getProjectConfig(
  type: ScaffoldType,
  iotWorkbenchProjectFilePath: string
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
): Promise<any> {
  let projectConfig: { [key: string]: string } = {};
  if (iotWorkbenchProjectFilePath && (await FileUtility.fileExists(type, iotWorkbenchProjectFilePath))) {
    const projectConfigContent = ((await FileUtility.readFile(
      type,
      iotWorkbenchProjectFilePath,
      "utf8"
    )) as string).trim();
    if (projectConfigContent) {
      // NOTE(review): JSON.parse will throw on a malformed config file —
      // confirm callers are prepared for that.
      projectConfig = JSON.parse(projectConfigContent);
    }
  }
  return projectConfig;
}
/**
 * Finds the first workspace file (FileNames.workspaceExtensionName) directly
 * under `rootPath`.
 *
 * @returns the file name, or "" when none exists.
 * @throws ArgumentEmptyOrNullError when rootPath is empty.
 *
 * BUG FIX: the previous `length >= 0` check was always true, so an empty
 * result list returned `undefined` despite the declared `string` return type.
 */
export function getWorkspaceFile(rootPath: string): string {
  if (!rootPath) {
    throw new ArgumentEmptyOrNullError("get workspace file", "root path");
  }
  const workspaceFiles = fs
    .readdirSync(rootPath)
    .filter(file => path.extname(file).endsWith(FileNames.workspaceExtensionName));
  if (workspaceFiles.length > 0) {
    return workspaceFiles[0];
  }
  return "";
}
/**
 * Update project host type configuration in iot workbench project file.
 * Create one if not exists.
 * @param type Scaffold type
 * @param iotWorkbenchProjectFilePath path of the project config file
 * @param projectHostType host type (e.g. Workspace / Container) to record
 * @throws ArgumentEmptyOrNullError when the file path is empty
 */
export async function updateProjectHostTypeConfig(
  type: ScaffoldType,
  iotWorkbenchProjectFilePath: string,
  projectHostType: ProjectHostType
): Promise<void> {
  if (!iotWorkbenchProjectFilePath) {
    throw new ArgumentEmptyOrNullError("update project host type configuration", "iot workbench project file path");
  }
  // Get original configs from config file
  const projectConfig = await getProjectConfig(type, iotWorkbenchProjectFilePath);
  // Update project host type
  projectConfig[`${ConfigKey.projectHostType}`] = ProjectHostType[projectHostType];
  // Add config version for easier backward compatibility in the future.
  const workbenchVersion = "1.0.0";
  if (!projectConfig[`${ConfigKey.workbenchVersion}`]) {
    projectConfig[`${ConfigKey.workbenchVersion}`] = workbenchVersion;
  }
  await FileUtility.writeJsonFile(type, iotWorkbenchProjectFilePath, projectConfig);
}
/**
 * Config External CMake Project config file as an IoT Workbench Container
 * Project. Throw cancel operation error if not CMake project. Update project
 * host type and board id in IoT Workbench project file.
 * @param scaffoldType scaffold type used for all file operations
 */
export async function configExternalCMakeProjectToIoTContainerProject(scaffoldType: ScaffoldType): Promise<void> {
  const projectRootPath = getProjectDeviceRootPath();
  if (!projectRootPath) {
    throw new WorkspaceNotOpenError("configure external CMake project to IoT container project");
  }
  // Check if it is a cmake project
  const cmakeFile = path.join(projectRootPath, FileNames.cmakeFileName);
  if (!(await FileUtility.fileExists(scaffoldType, cmakeFile))) {
    const message = `Missing ${FileNames.cmakeFileName} to be configured as Embedded Linux project.`;
    vscode.window.showWarningMessage(message);
    throw new OperationCanceledError(message);
  }
  const iotWorkbenchProjectFile = path.join(projectRootPath, FileNames.iotWorkbenchProjectFileName);
  // Update project host type in IoT Workbench Project file
  await updateProjectHostTypeConfig(scaffoldType, iotWorkbenchProjectFile, ProjectHostType.Container);
  // Update board Id as Raspberry Pi in IoT Workbench Project file
  const projectConfig = await getProjectConfig(scaffoldType, iotWorkbenchProjectFile);
  projectConfig[`${ConfigKey.boardId}`] = raspberryPiDeviceModule.RaspberryPiDevice.boardId;
  await FileUtility.writeJsonFile(scaffoldType, iotWorkbenchProjectFile, projectConfig);
}
/**
 * Used when it is an IoT workspace project but not open correctly.
 * Ask to open as workspace.
 * Does nothing when the expected project files are not present.
 * @throws WorkspaceNotOpenError when no workspace folder is open
 */
export async function properlyOpenIoTWorkspaceProject(telemetryContext: TelemetryContext): Promise<void> {
  const rootPath = getProjectDeviceRootPath();
  if (!rootPath) {
    throw new WorkspaceNotOpenError("properly open IoT workspace project");
  }
  const workbenchFileName = path.join(
    rootPath,
    IoTWorkspaceProject.folderName.deviceDefaultFolderName,
    FileNames.iotWorkbenchProjectFileName
  );
  const workspaceFile = getWorkspaceFile(rootPath);
  if (fs.existsSync(workbenchFileName) && workspaceFile) {
    await askAndOpenProject(rootPath, workspaceFile, telemetryContext);
  }
}
/**
 * Whether the currently opened folder is an IoT workspace project, i.e. it
 * contains both the IoT Workbench project file (under the default device
 * folder) and a workspace file.
 */
export function isWorkspaceProject(): boolean {
  const rootPath = getProjectDeviceRootPath();
  if (!rootPath) {
    return false;
  }
  const workbenchFileName = path.join(
    rootPath,
    IoTWorkspaceProject.folderName.deviceDefaultFolderName,
    FileNames.iotWorkbenchProjectFileName
  );
  const workspaceFile = getWorkspaceFile(rootPath);
  // Collapse the if/return-true/return-false tail into one boolean expression.
  return Boolean(workspaceFile) && fs.existsSync(workbenchFileName);
}
/**
* Construct and load iot project.
* If this function is triggered by extension load, load project and ignore any
* error. If this function is triggered by command execution, load project,
* check project validation and throw error if any.
*/
export async function constructAndLoadIoTProject(
context: vscode.ExtensionContext,
channel: vscode.OutputChannel,
telemetryContext: TelemetryContext,
isTriggeredWhenExtensionLoad = false
): Promise<IoTWorkbenchProjectBase | undefined> {
const scaffoldType = ScaffoldType.Workspace;
const projectFileRootPath = getProjectDeviceRootPath();
const projectHostType = await IoTWorkbenchProjectBase.getProjectType(scaffoldType, projectFileRootPath);
let iotProject;
if (projectHostType === ProjectHostType.Container) {
iotProject = new ioTContainerizedProjectModule.IoTContainerizedProject(
context,
channel,
telemetryContext,
projectFileRootPath
);
} else if (projectHostType === ProjectHostType.Workspace) {
const projectRootPath = path.join(projectFileRootPath, "..");
iotProject = new ioTWorkspaceProjectModule.IoTWorkspaceProject(context, channel, telemetryContext, projectRootPath);
}
if (isTriggeredWhenExtensionLoad) {
if (iotProject) {
try {
await iotProject.load(scaffoldType, true);
} catch (error) {
// Just try to load the project at extension load time. Ignore error
}
}
return;
}
// IoT Workspace Project improperly open as folder,
// or external project.
if (!iotProject) {
const isIoTWorkspaceProject = isWorkspaceProject();
if (isIoTWorkspaceProject) {
// If current folder is an IoT Workspace Project but not open correctly,
// ask to open properly
await properlyOpenIoTWorkspaceProject(telemetryContext);
} else {
// If external project
await handleExternalProject(telemetryContext);
}
return;
}
await iotProject.load(scaffoldType);
return iotProject;
}
/**
 * Look up an enum member by its value. Despite the name, this returns the
 * enum member (`myEnum[key]`) rather than the key string — preserved for
 * compatibility with existing callers. Returns undefined when no member
 * carries the given value.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function getEnumKeyByEnumValue(myEnum: any, enumValue: any): any {
  const matchedKey = Object.keys(myEnum).find(k => myEnum[k] === enumValue);
  if (matchedKey === undefined) {
    return undefined;
  }
  return myEnum[matchedKey];
}
/**
 * Let the user pick a development platform from the bundled platform list.
 * @returns the selected quick-pick item, or undefined when dismissed.
 */
export async function selectPlatform(
  type: ScaffoldType,
  context: vscode.ExtensionContext
): Promise<vscode.QuickPickItem | undefined> {
  const platformListPath = context.asAbsolutePath(
    path.join(FileNames.resourcesFolderName, FileNames.templatesFolderName, FileNames.platformListFileName)
  );
  const platformListJsonString = (await FileUtility.readFile(type, platformListPath, "utf8")) as string;
  const platformListJson = JSON.parse(platformListJsonString);
  // Map every platform entry onto a quick-pick item.
  const platformList: vscode.QuickPickItem[] = platformListJson.platforms.map((platform: Platform) => ({
    label: platform.name,
    description: platform.description
  }));
  return vscode.window.showQuickPick(platformList, {
    ignoreFocusOut: true,
    matchOnDescription: true,
    matchOnDetail: true,
    placeHolder: "Select a platform"
  });
}
// Quick-pick labels used by the overwrite-confirmation dialog below.
enum OverwriteLabel {
  No = "No",
  YesToAll = "Yes to all"
}
/**
 * Ask whether to overwrite all configuration files.
 * @param fileName name of the configuration file that already exists
 * @throws OperationCanceledError when the user dismisses the picker
 */
export async function askToOverwriteFile(fileName: string): Promise<vscode.QuickPickItem> {
  // Build the two choices inline instead of pushing onto an empty array.
  const overwriteTasksJsonOption: vscode.QuickPickItem[] = [
    {
      label: OverwriteLabel.No,
      detail: "Do not overwrite existed file and cancel the configuration process."
    },
    {
      label: OverwriteLabel.YesToAll,
      detail: "Automatically overwrite all configuration files."
    }
  ];
  const overwriteSelection = await vscode.window.showQuickPick(overwriteTasksJsonOption, {
    ignoreFocusOut: true,
    placeHolder: `Configuration file ${fileName} already exists. \
Do you want to overwrite all existed configuration files or cancel the configuration process?`
  });
  if (!overwriteSelection) {
    // Selection was cancelled.
    throw new OperationCanceledError(`Ask to overwrite ${fileName} selection cancelled.`);
  }
  return overwriteSelection;
}
/**
 * If any of the configuration files already exists, ask to overwrite all of
 * them or cancel the configuration process.
 * @returns true - overwrite all configuration files; false - cancel
 * configuration process.
 */
export async function askToOverwrite(
  scaffoldType: ScaffoldType,
  projectPath: string,
  templateFilesInfo: TemplateFileInfo[]
): Promise<boolean> {
  // Stop at the first template file already on disk and let the user decide
  // for the whole batch.
  for (const fileInfo of templateFilesInfo) {
    const targetFilePath = path.join(projectPath, fileInfo.targetPath, fileInfo.fileName);
    const alreadyExists = await FileUtility.fileExists(scaffoldType, targetFilePath);
    if (alreadyExists) {
      const selection = await askToOverwriteFile(fileInfo.fileName);
      return selection.label === OverwriteLabel.YesToAll;
    }
  }
  // No target file exists: safe to write everything without asking.
  return true;
}
/**
 * Fetch the named VS Code task and execute it.
 * When no tasks (or no matching task) can be fetched, reports the problem on
 * the channel and asks the user to (re)configure the environment instead.
 * @throws DirectoryNotFoundError when the device root folder is missing
 * @throws OperationFailedError when executing the task fails
 */
export async function fetchAndExecuteTask(
  context: vscode.ExtensionContext,
  channel: vscode.OutputChannel,
  telemetryContext: TelemetryContext,
  deviceRootPath: string,
  operationType: OperationType,
  platform: PlatformType,
  taskName: string
): Promise<void> {
  const scaffoldType = ScaffoldType.Workspace;
  if (!(await FileUtility.directoryExists(scaffoldType, deviceRootPath))) {
    throw new DirectoryNotFoundError("fetch and execute task", `device root folder ${deviceRootPath}`, "");
  }
  // Shared fallback (was duplicated verbatim in both failure branches):
  // log the message, then ask the user to configure the environment.
  const reportAndAskToConfigure = async (message: string): Promise<void> => {
    channelShowAndAppendLine(channel, message);
    await askToConfigureEnvironment(
      context,
      channel,
      telemetryContext,
      platform,
      deviceRootPath,
      scaffoldType,
      operationType
    );
  };
  const tasks = await vscode.tasks.fetchTasks();
  if (!tasks || tasks.length < 1) {
    await reportAndAskToConfigure(`Failed to fetch tasks.`);
    return;
  }
  const operationTask = tasks.filter(task => {
    return task.name === taskName;
  });
  if (!operationTask || operationTask.length < 1) {
    await reportAndAskToConfigure(
      `Failed to fetch default ${operationType.toLowerCase()} task with task name ${taskName}.`
    );
    return;
  }
  try {
    await vscode.tasks.executeTask(operationTask[0]);
  } catch (error) {
    throw new OperationFailedError(`execute task to ${operationType.toLowerCase()}`, `${error.message}`, "");
  }
}
/**
 * Read and parse the bundled template list JSON file.
 */
export async function getTemplateJson(
  context: vscode.ExtensionContext,
  scaffoldType: ScaffoldType
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
): Promise<any> {
  const templateJsonFilePath = context.asAbsolutePath(
    path.join(FileNames.resourcesFolderName, FileNames.templatesFolderName, FileNames.templateFileName)
  );
  const rawTemplateJson = (await FileUtility.readFile(scaffoldType, templateJsonFilePath, "utf8")) as string;
  return JSON.parse(rawTemplateJson);
}
/**
 * Get environment development template files by template name, asking the
 * user whether to overwrite when any target file already exists.
 * @throws ArgumentEmptyOrNullError when projectPath is empty
 * @throws SystemResourceNotFoundError when no matching template exists
 * @throws OperationCanceledError when the user declines to overwrite
 */
export async function getEnvTemplateFilesAndAskOverwrite(
  context: vscode.ExtensionContext,
  projectPath: string,
  scaffoldType: ScaffoldType,
  templateName: string
): Promise<TemplateFileInfo[]> {
  if (!projectPath) {
    throw new ArgumentEmptyOrNullError("project path", "Please open the folder and initialize project again.");
  }
  const templateJson = await getTemplateJson(context, scaffoldType);
  // Get environment template files matching both the tag and the name.
  const projectEnvTemplate: ProjectTemplate[] = templateJson.templates.filter((template: ProjectTemplate) => {
    return template.tag === TemplateTag.DevelopmentEnvironment && template.name === templateName;
  });
  if (projectEnvTemplate.length === 0) {
    throw new SystemResourceNotFoundError(
      "environment template files",
      `template tag ${TemplateTag.DevelopmentEnvironment} and template name ${templateName}`,
      "template Json file"
    );
  }
  const templateFolderName = projectEnvTemplate[0].path;
  const templateFolder = context.asAbsolutePath(
    path.join(FileNames.resourcesFolderName, FileNames.templatesFolderName, templateFolderName)
  );
  const templateFilesInfo: TemplateFileInfo[] = await getTemplateFilesInfo(templateFolder);
  // Ask whether to overwrite existing files (was: a dead `= false`
  // initializer immediately overwritten).
  const overwriteAll = await askToOverwrite(scaffoldType, projectPath, templateFilesInfo);
  if (!overwriteAll) {
    const message = "Do not overwrite configuration files and cancel configuration process.";
    throw new OperationCanceledError(message);
  }
  return templateFilesInfo;
}
/**
 * Get the current OS platform string (e.g. "win32", "darwin", "linux").
 * Kept async for interface compatibility even though `os.platform()` is
 * synchronous; the previous body pointlessly awaited a non-Promise function
 * reference before calling it.
 */
export async function getPlatform(): Promise<string> {
  const localOs = sdk.Utility.require("os") as typeof import("os");
  return localOs.platform();
}
/**
 * Get the current user's home directory.
 * Kept async for interface compatibility even though `os.homedir()` is
 * synchronous; the previous body pointlessly awaited a non-Promise function
 * reference before calling it.
 */
export async function getHomeDir(): Promise<string> {
  const localOs = sdk.Utility.require("os") as typeof import("os");
  return localOs.homedir();
}
/**
 * Whether to pop up the landing page.
 * Pops up only the first time the user uses workbench; afterwards the
 * `hasPopUp` flag persisted in global state suppresses it.
 */
export function shouldShowLandingPage(context: vscode.ExtensionContext): boolean {
  const alreadyPoppedUp = context.globalState.get<boolean>(ConfigKey.hasPopUp, false);
  return !alreadyPoppedUp;
}
/**
* Hash a string and get hash value.
* @param stringToHash string to hash
* @param algorithm hash algorithm
*/
export function getHashFromString(stringToHash: string, algorithm = "md5"): string {
const hash = crypto.createHash(algorithm);
hash.update(stringToHash);
const hashValue = hash.digest("hex");
return hashValue;
} | the_stack |
import {
Datasets,
DataPoint,
RenderInfo,
BulletInfo,
Dataset,
Size,
Transform,
ChartElements,
GraphType,
ValueType,
} from "./data";
import * as helper from "./helper";
import * as d3 from "d3";
import * as expr from "./expr";
/**
 * Create the nested SVG areas (svg > graphArea > dataArea) used by the
 * bullet renderer and register them in a ChartElements map.
 */
function createAreas(
  canvas: HTMLElement,
  renderInfo: RenderInfo,
  bulletInfo: BulletInfo
): ChartElements {
  const chartElements: ChartElements = {};
  if (!renderInfo || !bulletInfo) return;
  // Whole plotting area, margins included.
  const svg = d3
    .select(canvas)
    .append("svg")
    .attr("id", "svg")
    .attr(
      "width",
      renderInfo.dataAreaSize.width + renderInfo.margin.left + renderInfo.margin.right
    )
    .attr(
      "height",
      renderInfo.dataAreaSize.height + renderInfo.margin.top + renderInfo.margin.bottom
    );
  chartElements["svg"] = svg;
  // graphArea: holds the chart area, title and legend.
  const graphArea = svg
    .append("g")
    .attr("id", "graphArea")
    .attr("transform", `translate(${renderInfo.margin.left},${renderInfo.margin.top})`)
    .attr("width", renderInfo.dataAreaSize.width + renderInfo.margin.right)
    .attr("height", renderInfo.dataAreaSize.height + renderInfo.margin.bottom);
  chartElements["graphArea"] = graphArea;
  // dataArea: under graphArea; holds points, lines, xAxis and yAxis.
  const dataArea = graphArea
    .append("g")
    .attr("id", "dataArea")
    .attr("width", renderInfo.dataAreaSize.width)
    .attr("height", renderInfo.dataAreaSize.height);
  chartElements["dataArea"] = dataArea;
  return chartElements;
}
/**
 * Replace the svg's fixed width/height with a viewBox and size the host
 * canvas either to the panel width or by renderInfo.fixedScale.
 */
function setChartScale(
  _canvas: HTMLElement,
  chartElements: ChartElements,
  renderInfo: RenderInfo
) {
  const canvas = d3.select(_canvas);
  const svg = chartElements.svg;
  const svgWidth = parseFloat(svg.attr("width"));
  const svgHeight = parseFloat(svg.attr("height"));
  svg.attr("width", null)
    .attr("height", null)
    .attr("viewBox", `0 0 ${svgWidth} ${svgHeight}`)
    .attr("preserveAspectRatio", "xMidYMid meet");
  if (renderInfo.fitPanelWidth) {
    canvas.style("width", "100%");
  } else {
    // Scale both dimensions by the user-configured fixed factor.
    canvas.style("width", `${svgWidth * renderInfo.fixedScale}px`);
    canvas.style("height", `${svgHeight * renderInfo.fixedScale}px`);
  }
}
// Render the bullet title and the value-unit label under graphArea.
// Must run AFTER renderAxis: the vertical branch reads the "width"
// attribute that renderAxis stores on chartElements.axis.
function renderTitle(
  chartElements: ChartElements,
  renderInfo: RenderInfo,
  bulletInfo: BulletInfo
) {
  // console.log("renderTitle");
  if (!renderInfo || !bulletInfo) return;
  const spacing = 6; // spacing between title and dataArea
  if (bulletInfo.title) {
    let titleSize = helper.measureTextSize(
      bulletInfo.title,
      "tracker-title-small"
    );
    if (bulletInfo.orientation === "horizontal") {
      // Title sits to the left of the data area, vertically centered.
      let title = chartElements.graphArea
        .append("text")
        .text(bulletInfo.title) // pivot at center
        .attr("id", "title")
        .attr("x", titleSize.width / 2.0)
        .attr("y", renderInfo.dataAreaSize.height / 2.0)
        .attr("height", titleSize.height) // for later use
        .attr("class", "tracker-title-small");
      chartElements["title"] = title;
      // Expand parent areas to make room for the title
      helper.expandArea(chartElements.svg, titleSize.width + spacing, 0);
      helper.expandArea(
        chartElements.graphArea,
        titleSize.width + spacing,
        0
      );
      // Move sibling areas right, out from under the title
      helper.moveArea(
        chartElements.dataArea,
        titleSize.width + spacing,
        0
      );
    } else if (bulletInfo.orientation === "vertical") {
      // If label width > dataArea width, widen everything first.
      let xMiddle = renderInfo.dataAreaSize.width / 2.0;
      if (titleSize.width > renderInfo.dataAreaSize.width) {
        // console.log("expand area for vertical title");
        helper.expandArea(
          chartElements.svg,
          titleSize.width - renderInfo.dataAreaSize.width,
          0
        );
        helper.expandArea(
          chartElements.graphArea,
          titleSize.width - renderInfo.dataAreaSize.width,
          0
        );
        helper.moveArea(
          chartElements.dataArea,
          titleSize.width / 2.0 - renderInfo.dataAreaSize.width / 2.0,
          0
        );
        xMiddle = titleSize.width / 2.0;
      }
      // Offset by the axis width recorded by renderAxis (render-order dependency).
      let axisWidth = parseFloat(chartElements.axis.attr("width"));
      let title = chartElements.graphArea
        .append("text")
        .text(bulletInfo.title) // pivot at center
        .attr("id", "title")
        .attr("x", xMiddle + axisWidth)
        .attr("y", titleSize.height / 2.0)
        .attr("height", titleSize.height) // for later use
        .attr("class", "tracker-title-small");
      chartElements["title"] = title;
      // Expand parent areas
      helper.expandArea(chartElements.svg, 0, titleSize.height + spacing);
      helper.expandArea(
        chartElements.graphArea,
        0,
        titleSize.height + spacing
      );
      // Move sibling areas down, below the title
      helper.moveArea(
        chartElements.dataArea,
        0,
        titleSize.height + spacing
      );
    }
  }
  if (bulletInfo.valueUnit) {
    let unitSize = helper.measureTextSize(
      bulletInfo.valueUnit,
      "tracker-tick-label"
    );
    if (bulletInfo.orientation === "horizontal") {
      // Unit label sits left of the data area, below the axis baseline.
      let unit = chartElements.dataArea
        .append("text")
        .text(bulletInfo.valueUnit)
        .attr("id", "unit")
        .attr("x", -1 * (unitSize.width + spacing))
        .attr("y", renderInfo.dataAreaSize.height + spacing)
        .attr("height", unitSize.height) // for later use
        .attr("class", "tracker-tick-label"); // pivot at corner
      chartElements["unit"] = unit;
    } else if (bulletInfo.orientation === "vertical") {
      // Unit label is centered above the data area.
      let unit = chartElements.dataArea
        .append("text")
        .text(bulletInfo.valueUnit)
        .attr("id", "unit")
        .attr(
          "x",
          renderInfo.dataAreaSize.width / 2 - unitSize.width / 2
        )
        .attr("y", -(unitSize.height / 2.0 + spacing))
        .attr("height", unitSize.height) // for later use
        .attr("class", "tracker-tick-label"); // pivot at corner
      chartElements["unit"] = unit;
      // Expand parent areas
      helper.expandArea(chartElements.svg, 0, unitSize.height + spacing);
      helper.expandArea(
        chartElements.graphArea,
        0,
        unitSize.height + spacing
      );
      // Move dataArea down
      helper.moveArea(
        chartElements.dataArea,
        0,
        unitSize.height + spacing
      );
    }
  }
}
// Render ticks and tick labels along the value axis.
// Registers "scale" and "axis" on chartElements; renderTitle later reads the
// axis "width" attribute, so both must be populated here.
function renderAxis(
  chartElements: ChartElements,
  renderInfo: RenderInfo,
  bulletInfo: BulletInfo,
  dataset: Dataset
) {
  if (!renderInfo || !bulletInfo) return;
  const range = bulletInfo.range;
  const lastRange = range[range.length - 1];
  const domain = [0, lastRange];
  const tickLength = 6;
  const valueUnit = bulletInfo.valueUnit;
  // Append " %" to tick labels when the configured unit ends with "%".
  const tickFormatFn: any = function (value: any) {
    if (valueUnit && valueUnit.endsWith("%")) {
      return d3.tickFormat(0, lastRange, 7)(value) + " %";
    }
    return d3.tickFormat(0, lastRange, 7)(value);
  };
  // Widest possible label, used to reserve space for the tick labels.
  const maxTickLabel = tickFormatFn(lastRange);
  const maxTickLabelSize = helper.measureTextSize(
    maxTickLabel,
    "tracker-tick-label"
  );
  if (bulletInfo.orientation === "horizontal") {
    const scale = d3.scaleLinear();
    scale.domain(domain).range([0, renderInfo.dataAreaSize.width]);
    chartElements["scale"] = scale;
    const axisGen = d3.axisBottom(scale);
    axisGen.tickFormat(tickFormatFn);
    const axis = chartElements.dataArea
      .append("g")
      .attr("id", "axis")
      .attr(
        "transform",
        "translate(0," + renderInfo.dataAreaSize.height + ")"
      )
      .call(axisGen)
      .attr("class", "tracker-axis");
    chartElements["axis"] = axis;
    // Style the axis in place; the selections are not needed afterwards
    // (previously bound to unused locals).
    axis.selectAll("path").style("stroke", "none");
    axis.selectAll("text").attr("class", "tracker-tick-label");
    axis.attr(
      "width",
      renderInfo.dataAreaSize.width + maxTickLabelSize.width
    );
    axis.attr("height", tickLength + maxTickLabelSize.height);
    // Expand areas to make room for ticks and labels
    helper.expandArea(
      chartElements.svg,
      +maxTickLabelSize.width,
      tickLength + maxTickLabelSize.height
    );
    helper.expandArea(
      chartElements.graphArea,
      +maxTickLabelSize.width,
      tickLength + maxTickLabelSize.height
    );
  } else if (bulletInfo.orientation === "vertical") {
    const scale = d3.scaleLinear();
    scale.domain(domain).range([renderInfo.dataAreaSize.height, 0]);
    chartElements["scale"] = scale;
    const axisGen = d3.axisLeft(scale);
    axisGen.tickFormat(tickFormatFn);
    const axis = chartElements.dataArea
      .append("g")
      .attr("id", "axis")
      .attr("x", 0)
      .attr("y", 0)
      .call(axisGen)
      .attr("class", "tracker-axis");
    chartElements["axis"] = axis;
    axis.selectAll("path").style("stroke", "none");
    axis.selectAll("text").attr("class", "tracker-tick-label");
    axis.attr("width", tickLength + maxTickLabelSize.width);
    // Bug fix: was dataAreaSize.width — a vertical axis spans the data
    // area's HEIGHT, mirroring the horizontal branch which uses width.
    axis.attr("height", renderInfo.dataAreaSize.height);
    // Expand areas to make room for ticks and labels
    helper.expandArea(
      chartElements.svg,
      tickLength + maxTickLabelSize.width,
      0
    );
    helper.expandArea(
      chartElements.graphArea,
      tickLength + maxTickLabelSize.width,
      0
    );
    helper.moveArea(
      chartElements.dataArea,
      tickLength + maxTickLabelSize.width,
      0
    );
  }
}
// Render the quantitative range bands (poor/average/good/...) behind the bar.
function renderBackPanel(
  chartElements: ChartElements,
  renderInfo: RenderInfo,
  bulletInfo: BulletInfo,
  dataset: Dataset
) {
  if (!renderInfo || !bulletInfo) return;
  const scale = chartElements.scale;
  // Build one {start, end, color} segment per configured range bound.
  const range = bulletInfo.range;
  const rangeColor = bulletInfo.rangeColor;
  const data: any[] = [];
  let lastBound = 0;
  for (let ind = 0; ind < range.length; ind++) {
    data.push({
      start: lastBound,
      end: range[ind],
      color: rangeColor[ind],
    });
    lastBound = range[ind];
  }
  if (bulletInfo.orientation === "horizontal") {
    chartElements.dataArea
      .selectAll("backPanel")
      .data(data)
      .enter()
      .append("rect")
      .attr("x", (d: any) => Math.floor(scale(d.start)))
      .attr("y", 0)
      .attr("width", (d: any) => Math.ceil(scale(d.end - d.start)))
      .attr("height", renderInfo.dataAreaSize.height)
      .style("fill", (d: any) => d.color);
  } else if (bulletInfo.orientation === "vertical") {
    chartElements.dataArea
      .selectAll("backPanel")
      .data(data)
      .enter()
      .append("rect")
      .attr("x", 0)
      .attr("y", (d: any) => Math.floor(scale(d.end)))
      .attr("width", renderInfo.dataAreaSize.width)
      .attr(
        "height",
        (d: any) =>
          renderInfo.dataAreaSize.height - Math.floor(scale(d.end - d.start))
      )
      .style("fill", (d: any) => d.color);
  }
}
// Render the bar showing the actual (measured) value.
// Returns an error-message string on invalid input, otherwise undefined.
function renderBar(
  chartElements: ChartElements,
  renderInfo: RenderInfo,
  bulletInfo: BulletInfo,
  dataset: Dataset
) {
  if (!renderInfo || !bulletInfo) return;
  const retActualValue = expr.resolveValue(bulletInfo.value, renderInfo);
  // resolveValue reports failures as strings; propagate them unchanged.
  if (typeof retActualValue === "string") {
    return retActualValue;
  }
  const actualValue = retActualValue;
  if (Number.isNaN(actualValue)) {
    return "Invalid input value: " + retActualValue;
  }
  const valueColor = bulletInfo.valueColor;
  const scale = chartElements.scale;
  if (bulletInfo.orientation === "horizontal") {
    // The bar occupies the middle third of the data-area height.
    const barWidth = renderInfo.dataAreaSize.height / 3;
    chartElements.dataArea
      .append("rect")
      .attr("x", scale(0))
      .attr("y", barWidth)
      .attr("width", Math.floor(scale(actualValue)))
      .attr("height", barWidth)
      .style("fill", valueColor);
  } else if (bulletInfo.orientation === "vertical") {
    // The bar occupies the middle third of the data-area width.
    const barWidth = renderInfo.dataAreaSize.width / 3;
    chartElements.dataArea
      .append("rect")
      .attr("x", barWidth)
      .attr("y", Math.floor(scale(actualValue)))
      .attr("width", barWidth)
      .attr(
        "height",
        renderInfo.dataAreaSize.height - Math.floor(scale(actualValue))
      )
      .style("fill", valueColor);
  }
}
// Render the marker line for the target value (no-op when markers are off).
function renderMark(
  chartElements: ChartElements,
  renderInfo: RenderInfo,
  bulletInfo: BulletInfo,
  dataset: Dataset
) {
  if (!renderInfo || !bulletInfo) return;
  if (!bulletInfo.showMarker) return;
  const markerValue = bulletInfo.markerValue;
  const markerColor = bulletInfo.markerColor;
  const scale = chartElements.scale;
  if (bulletInfo.orientation === "horizontal") {
    // A thin (3px) tick centered on the bar, spanning two thirds of it.
    const markerLength = (renderInfo.dataAreaSize.height * 2) / 3;
    chartElements.dataArea
      .append("rect")
      .attr("x", scale(markerValue) - 1.5)
      .attr("y", markerLength / 4)
      .attr("width", 3)
      .attr("height", markerLength)
      .style("fill", markerColor);
  } else if (bulletInfo.orientation === "vertical") {
    const markerLength = (renderInfo.dataAreaSize.width * 2) / 3;
    chartElements.dataArea
      .append("rect")
      .attr("x", markerLength / 4)
      .attr("y", scale(markerValue) - 1.5)
      .attr("width", markerLength)
      .attr("height", 3)
      .style("fill", markerColor);
  }
}
// Bullet graph https://en.wikipedia.org/wiki/Bullet_graph
export function renderBullet(
canvas: HTMLElement,
renderInfo: RenderInfo,
bulletInfo: BulletInfo
) {
// console.log("renderBullet");
// console.log(renderInfo);
if (!renderInfo || !bulletInfo) return;
let datasetId = parseFloat(bulletInfo.dataset);
let dataset = renderInfo.datasets.getDatasetById(datasetId);
// Set initial dataArea size
if (bulletInfo.orientation === "horizontal") {
renderInfo.dataAreaSize = { width: 250, height: 24 };
} else if (bulletInfo.orientation === "vertical") {
renderInfo.dataAreaSize = { width: 24, height: 250 };
}
let chartElements = createAreas(canvas, renderInfo, bulletInfo);
let retRenderAxis = renderAxis(
chartElements,
renderInfo,
bulletInfo,
dataset
);
if (typeof retRenderAxis === "string") {
return retRenderAxis;
}
renderTitle(chartElements, renderInfo, bulletInfo);
renderBackPanel(chartElements, renderInfo, bulletInfo, dataset);
let retRenderBar = renderBar(
chartElements,
renderInfo,
bulletInfo,
dataset
);
if (typeof retRenderBar === "string") {
return retRenderBar;
}
renderMark(chartElements, renderInfo, bulletInfo, dataset);
setChartScale(canvas, chartElements, renderInfo);
} | the_stack |
import Component from '../Core/Component';
import Hilo3d from '../Core/Hilo3d';
import * as Math from '../Core/Math';
import {SClass} from '../Core/Decorator';
import SArray from '../DataStructure/SArray';
import {INodeWithGlTFExtensions} from '../types/Resource';
import ISceneComponent, {ISceneComponentState} from '../Renderer/ISceneComponent';
import ISceneActor, {isSceneActor} from '../Renderer/ISceneActor';
import SObject from '../Core/SObject';
import CameraComponent from '../Camera/CameraComponent';
import Layers from '../Renderer/Layers';
export {ISceneComponentState};
/**
 * Scratch vector reused by `SceneComponent#translate` to avoid per-call
 * allocations.
 * @hidden
 */
const tmpVec3 = new Math.Vector3();
/**
 * Type guard: checks whether an instance is a `SceneComponent`.
 */
export function isSceneComponent(value: SObject): value is SceneComponent {
  const candidate = value as SceneComponent;
  return candidate.isSceneComponent;
}
/**
 * Scene component class: the root component of a `SceneActor`, i.e. the
 * special kind of component that actually carries a 3D transform.
 * You can use it directly, or use classes derived from it (for example
 * `PrimitiveComponent`, the primitive component that holds model data).
 * SceneComponent instances can be nested in a tree, but for performance
 * reasons you should in principle avoid dynamically re-parenting child
 * instances once they are created.
 *
 * @template IStateTypes Initial-state type, must extend
 * [ISceneComponentState](../interfaces/iscenecomponentstate).
 * @noInheritDoc
 */
@SClass({className: 'SceneComponent'})
export default class SceneComponent<
  IStateTypes extends ISceneComponentState = ISceneComponentState
> extends Component<IStateTypes> implements ISceneComponent<ISceneComponentState> {
  public isSceneComponent: boolean = true;
  /**
   * Whether GL resources should be released on destroy. If you are certain
   * the same materials, geometries, etc. will be reused later, set this to
   * `false` as a performance optimization.
   * Resources instantiated from a GlTF model default this to `false`; their
   * GL resources are released together when the resource itself is released.
   *
   * @default true
   */
  public needReleaseGlRes: boolean = true;
  /**
   * Layer mask, see [Layers](../layers).
   */
  public layers: Layers = new Layers();
  protected _owner: ISceneActor;
  protected _parent: SceneComponent | ISceneActor;
  protected _children: SArray<SceneComponent> = new SArray();
  protected _node: Hilo3d.Node;
  // Scratch quaternion reused by `rotate` to avoid per-call allocations.
  private _tmpQuat = new Math.Quaternion();
  /**
   * The parent instance; its concrete type depends on where this component
   * sits (falls back to the owner actor when no component parent is set).
   * Normally you do not need this yourself.
   */
  get parent(): ISceneActor | SceneComponent {
    return this._parent || this._owner;
  }
  /**
   * All child SceneComponents of this component. Normally not needed.
   */
  get children() {
    return this._children;
  }
  /**
   * The underlying hilo3d node. The implementation may change at any time —
   * **do not use this yourself**.
   *
   * @hidden
   */
  get hiloNode() {
    return this._node;
  }
  /**
   * Set whether this component is visible in the world.
   */
  set visible(value: boolean) {
    this._node.visible = value;
  }
  /**
   * Get whether this component is visible in the world.
   */
  get visible() {
    return this._node.visible;
  }
  /**
   * Set whether this is a static object. If so, no instance from this level
   * down will update its `WorldMatrix` every frame. Performance optimization.
   */
  set isStatic(value: boolean) {
    this._node.autoUpdateWorldMatrix = !value;
    // `autoUpdateChildWorldMatrix` is not in the public hilo3d typings.
    (this._node as any).autoUpdateChildWorldMatrix = !value;
  }
  /**
   * Get whether this is a static object. If so, no instance from this level
   * down will update its `WorldMatrix` every frame. Performance optimization.
   */
  get isStatic() {
    return !this._node.autoUpdateWorldMatrix;
  }
  /**
   * Set local-space position.
   */
  set position(position: Math.Vector3) {
    this._node.position.copy(position);
  }
  /**
   * Get local-space position.
   */
  get position() {
    return this._node.position;
  }
  /**
   * Set local-space rotation (Euler angles).
   */
  set rotation(rotation: Math.Euler) {
    this._node.rotation.copy(rotation);
  }
  /**
   * Get local-space rotation (Euler angles).
   */
  get rotation() {
    return this._node.rotation;
  }
  /**
   * Set local-space scale.
   */
  set scale(scale: Math.Vector3) {
    this._node.scale.copy(scale);
  }
  /**
   * Get local-space scale.
   */
  get scale() {
    return this._node.scale;
  }
  /**
   * Set local-space pivot.
   */
  set pivot(pivot: Math.Vector3) {
    this._node.pivot.copy(pivot);
  }
  /**
   * Get local-space pivot.
   */
  get pivot() {
    return this._node.pivot;
  }
  /**
   * Set local-space quaternion.
   */
  set quaternion(quaternion: Math.Quaternion) {
    this._node.quaternion.copy(quaternion);
  }
  /**
   * Get local-space quaternion.
   */
  get quaternion() {
    return this._node.quaternion;
  }
  /**
   * Get the local matrix.
   */
  get matrix() {
    return this._node.matrix;
  }
  // Convenience accessors proxying single channels of the local-space
  // position / rotation / scale / pivot values above.
  set x(value: number) {
    this.position.x = value;
  }
  get x() {
    return this.position.x;
  }
  set y(value: number) {
    this.position.y = value;
  }
  get y() {
    return this.position.y;
  }
  set z(value: number) {
    this.position.z = value;
  }
  get z() {
    return this.position.z;
  }
  set rotationX(value: number) {
    this.rotation.x = value;
  }
  get rotationX() {
    return this.rotation.x;
  }
  set rotationY(value: number) {
    this.rotation.y = value;
  }
  get rotationY() {
    return this.rotation.y;
  }
  set rotationZ(value: number) {
    this.rotation.z = value;
  }
  get rotationZ() {
    return this.rotation.z;
  }
  set scaleX(value: number) {
    this.scale.x = value;
  }
  get scaleX() {
    return this.scale.x;
  }
  set scaleY(value: number) {
    this.scale.y = value;
  }
  get scaleY() {
    return this.scale.y;
  }
  set scaleZ(value: number) {
    this.scale.z = value;
  }
  get scaleZ() {
    return this.scale.z;
  }
  set pivotX(value: number) {
    this.pivot.x = value;
  }
  get pivotX() {
    return this.pivot.x;
  }
  set pivotY(value: number) {
    this.pivot.y = value;
  }
  get pivotY() {
    return this.pivot.y;
  }
  set pivotZ(value: number) {
    this.pivot.z = value;
  }
  get pivotZ() {
    return this.pivot.z;
  }
  /**
   * Directly set the world matrix data.
   */
  set worldMatrix(matrix: Math.Matrix4) {
    this._node.worldMatrix.copy(matrix);
  }
  /**
   * Directly get the world matrix data.
   */
  get worldMatrix() {
    return this._node.worldMatrix;
  }
  /**
   * Directly get the component's world-space position.
   */
  get absolutePosition(): Math.Vector3 {
    return this._node.worldMatrix.getTranslation();
  }
  /**
   * Directly set the component's world-space position.
   *
   * **Note: this involves a matrix clone and multiplication, which has some
   * performance cost.**
   */
  set absolutePosition(value: Math.Vector3) {
    const {parent} = this._node;
    if (parent) {
      // Convert the world position into this node's parent space.
      const invertParentWorldMatrix = parent.worldMatrix.clone();
      invertParentWorldMatrix.invert();
      this.position = value.transformMat4(invertParentWorldMatrix);
    } else {
      this.position = value;
    }
  }
  /**
   * Directly get the component's world-space rotation.
   *
   * **Note: computed from the world matrix on every call — has some cost.**
   */
  get absoluteRotation(): Math.Quaternion {
    const quat = new Math.Quaternion();
    return this._node.worldMatrix.getRotation(quat);
  }
  /**
   * Directly get the component's world-space scale.
   *
   * **Note: computed from the world matrix on every call — has some cost.**
   */
  get absoluteScale(): Math.Vector3 {
    return this._node.worldMatrix.getScaling();
  }
  /**
   * Directly get the component's NDC-space position under the **current main
   * camera**, or null when there is no main camera.
   *
   * **Note: computed from the world and view-projection matrices on every
   * call — has some cost.**
   */
  get ndcPosition() {
    const {mainCamera} = this.getWorld();
    if (!mainCamera) {
      return null;
    }
    return this.absolutePosition.transformMat4(mainCamera.viewProjectionMatrix);
  }
  /**
   * Directly get the component's local-space forward vector.
   */
  get forwardVector() {
    return new Math.Vector3(0, 0, 1).transformQuat(this.quaternion);
  }
  /**
   * Directly get the component's local-space up vector.
   */
  get upVector() {
    return new Math.Vector3(0, 1, 0).transformQuat(this.quaternion);
  }
  /**
   * Directly get the component's local-space right vector.
   */
  get rightVector() {
    return new Math.Vector3(1, 0, 0).transformQuat(this.quaternion);
  }
  /**
   * Directly get the component's world-space forward vector.
   *
   * **Note: fetches `absoluteRotation` first, which has some cost!**
   */
  get worldForwardVector() {
    return new Math.Vector3(0, 0, 1).transformQuat(this.absoluteRotation);
  }
  /**
   * Directly get the component's world-space up vector.
   *
   * **Note: fetches `absoluteRotation` first, which has some cost!**
   */
  get worldUpVector() {
    return new Math.Vector3(0, 1, 0).transformQuat(this.absoluteRotation);
  }
  /**
   * Directly get the component's world-space right vector.
   *
   * **Note: fetches `absoluteRotation` first, which has some cost!**
   */
  get worldRightVector() {
    return new Math.Vector3(1, 0, 0).transformQuat(this.absoluteRotation);
  }
  /**
   * Directly get the component's UP vector.
   *
   * **Note: this is a mutable value — unless you have a special need, use
   * `upVector` to be safe!**
   */
  get up() {
    return this._node.up;
  }
  /**
   * Directly get the component's NDC-space position under a **specified
   * camera**.
   *
   * **Note: computed from the world and view-projection matrices on every
   * call — has some cost.**
   */
  public getNdcPosition(camera: CameraComponent) {
    return this.absolutePosition.transformMat4(camera.viewProjectionMatrix);
  }
}
/**
* 初始化,继承请先`super.onInit()`。
*/
public onInit(initState?: IStateTypes) {
this._node = new Hilo3d.Node();
if (!initState) {
return;
}
const initTransform = initState;
if (initTransform.position) {
this._node.position.copy(initTransform.position);
delete initTransform['position'];
}
if (initTransform.rotation) {
this._node.rotation.copy(initTransform.rotation);
delete initTransform['rotation'];
}
if (initTransform.pivot) {
this._node.pivot.copy(initTransform.pivot);
delete initTransform['pivot'];
}
if (initTransform.quaternion) {
this._node.quaternion.copy(initTransform.quaternion);
delete initTransform['quaternion'];
}
if (initTransform.matrix) {
this._node.matrix.copy(initTransform.matrix);
delete initTransform['matrix'];
}
if (initTransform.visible !== undefined) {
this._node.visible = initTransform.visible;
}
}
  /**
   * Destruction. When overriding, call `super.onDestroy()` first.
   */
  public onDestroy() {
    if (this.needReleaseGlRes) {
      // Destroy the node and release its GL resources through the renderer.
      this._node.destroy(this.getGame().renderer, true);
    } else {
      // Keep GL resources alive; just detach the node from the scene graph.
      this._node.removeFromParent();
    }
  }
  /**
   * Get the component's bounding box (AABB).
   * @param bounds an existing bounds object to reuse, saves an allocation
   * @param currentMatrix a precomputed matrix to reuse, saves work
   */
  public getBounds(bounds?: Math.Bounds, currentMatrix?: Math.Matrix4) {
    return this._node.getBounds(null, currentMatrix, bounds);
  }
/**
 * Internal: attach a child component to this one, linking both the
 * component hierarchy and the underlying Hilo3d node hierarchy.
 *
 * **Do not call this yourself!!**
 *
 * @hidden
 */
public addChild(component: SceneComponent) {
    component._parent = this;
    component._node.addTo(this._node);
    this._children.add(component);
    return this;
}
/**
 * Internal: detach a child component from this one.
 *
 * Returns `undefined` when the component is not actually a child,
 * otherwise returns `this` for chaining.
 *
 * **Do not call this yourself!!**
 *
 * @hidden
 */
public removeChild(component: SceneComponent) {
    const index = this._children.indexOf(component);
    if (index < 0) {
        return;
    }
    component._parent = null;
    // NOTE(review): unlike addChild, this does not detach component._node
    // from this._node — presumably the caller handles that; confirm.
    this._children.remove(index);
    return this;
}
/**
 * Set the local-space position.
 */
public setPosition(x: number, y: number, z: number) {
    this._node.setPosition(x, y, z);
    return this;
}
/**
 * Set the local-space rotation (Euler angles).
 */
public setRotation(x: number, y: number, z: number) {
    this._node.setRotation(x, y, z);
    return this;
}
/**
 * Set the local-space scale.
 * (The original comment said "displacement", which was wrong — this
 * delegates to the node's setScale.)
 */
public setScale(x: number, y: number, z: number) {
    this._node.setScale(x, y, z);
    return this;
}
/**
 * Set the local-space pivot (anchor point).
 */
public setPivot(x: number, y: number, z: number) {
    this._node.setPivot(x, y, z);
    return this;
}
/**
 * Set the local-space quaternion component-wise.
 */
public setQuaternion(x: number, y: number, z: number, w: number) {
    const quat = this._node.quaternion;
    quat.x = x;
    quat.y = y;
    quat.z = z;
    quat.w = w;
    return this;
}
/**
 * Set the world-space position.
 */
public setAbsolutePosition(x: number, y: number, z: number) {
    const worldPosition = new Math.Vector3(x, y, z);
    this.absolutePosition = worldPosition;
    return this;
}
/**
 * Translate along `axis` by `distance`.
 *
 * Reuses a module-level temp vector to avoid allocation, so the call is
 * not re-entrant.
 */
public translate(axis: Math.Vector3, distance: number) {
    tmpVec3.copy(axis);
    tmpVec3.scale(distance);
    this.position.add(tmpVec3);
    return this;
}
/**
 * Rotate around `axis` by `rad` radians.
 */
public rotate(axis: Math.Vector3, rad: number) {
    // Build the delta rotation in the reusable temp quaternion, then apply
    // it on the right of the current quaternion.
    this._tmpQuat.setAxisAngle(axis, rad);
    this.quaternion.multiply(this._tmpQuat);
    return this;
}
/**
 * Update the world matrix of this instance and its child components.
 */
public updateMatrixWorld(force?: boolean) {
    // Walk up the node hierarchy until we find the highest ancestor that
    // sits directly under the stage, and trigger the world-matrix update
    // from there so the entire subtree (including this node) is refreshed.
    let node = this._node;
    while (node) {
        if (node.parent && (node.parent as any).isStage) {
            node.updateMatrixWorld(force);
            break;
        }
        node = node.parent;
    }
    // NOTE(review): if no stage ancestor exists (detached subtree), nothing
    // is updated — confirm this is intentional.
    return this;
}
/**
 * Remove this component from its parent — essentially the same as
 * `destroy`: removes itself from its owner and recursively removes all
 * child components. When the parent is itself a `SceneComponent` (i.e.
 * not a SceneActor), this is also removed from the parent's `children`.
 */
public removeFromParent() {
    // Guard against a missing parent: the original dereferenced
    // `this._parent` unchecked, so a detached component would throw here
    // (`!isSceneActor(null)` is truthy).
    if (this._parent && !isSceneActor(this._parent)) {
        this._parent.removeChild(this);
    }
    this._owner.removeComponent(this);
    return this;
}
/**
 * Orient this component to face `target`, which may be a raw vector, a
 * SceneActor, or another SceneComponent.
 */
public lookAt(target: Math.Vector3 | ISceneActor | SceneComponent) {
    let point: Math.Vector3;
    if (Math.isVector3(target)) {
        point = target;
    } else if (isSceneActor(target)) {
        point = target.transform.absolutePosition;
    } else {
        point = target.absolutePosition;
    }
    this._node.lookAt(point);
    return this;
}
/**
 * Internal: copy glTF-related identity fields from a Hilo3d node onto
 * this component's node.
 *
 * **Do not call this yourself!!**
 *
 * @hidden
 */
public cloneFromHiloNode(node: INodeWithGlTFExtensions) {
    (this._node as INodeWithGlTFExtensions).gltfExtensions = node.gltfExtensions;
    this._node.jointName = node.jointName;
    this._node.animationId = node.animationId;
    this._node.name = node.name;
}
/**
 * Override to add your own pre-render strategy.
 * Performs one pre-render pass, during which material pre-compilation,
 * resource pre-submission etc. are handled. Default implementation is a
 * no-op.
 */
public preRender() {
}
} | the_stack |
import { isEqual, sortBy } from "lodash"
import Micromerge, {
Json, ObjectId, OperationId, OperationPath,
BaseOperation, Patch,
ListItemMetadata, ListMetadata,
compareOpIds, getListElementId
} from "./micromerge"
import { Marks, markSpec, MarkType } from "./schema"
// Union of the two internal mark operations (CRDT-level, with op IDs).
export type MarkOperation = AddMarkOperation | RemoveMarkOperation
/** A position at which a mark operation can start or end.
 * In a text string with n characters, there are 2n+2 boundary positions:
 * one to the left or right of each character, plus the start and end of the string.
 */
export type BoundaryPosition =
    | { type: "before"; elemId: OperationId }
    | { type: "after"; elemId: OperationId }
    | { type: "startOfText" }
    | { type: "endOfText" }
// Which per-character metadata slot a set of mark ops is stored in.
type MarkOpsPosition = "markOpsBefore" | "markOpsAfter"
// Shared shape of an addMark operation before mark-type-specific attrs
// are layered on.
interface AddMarkOperationBase<M extends MarkType> extends BaseOperation {
    action: "addMark"
    /** List element to apply the mark start. */
    start: BoundaryPosition
    /** List element to apply the mark end, inclusive. */
    end: BoundaryPosition
    /** Mark to add. */
    markType: M
}
// A formatted span paired with the text it covers (external/display form).
export interface FormatSpanWithText {
    text: string
    marks: MarkMap
}
// addMark op per mark type: marks whose value carries data beyond "active"
// (comment, link) require attrs; the rest forbid them.
export type AddMarkOperation = Values<{
    [M in MarkType]: keyof Omit<MarkValue[M], "active"> extends never
        ? AddMarkOperationBase<M> & { attrs?: undefined }
        : AddMarkOperationBase<M> & {
            attrs: Required<Omit<MarkValue[M], "active">>
        }
}>
// Shared shape of a removeMark operation before mark-type-specific attrs.
interface RemoveMarkOperationBase<M extends MarkType> extends BaseOperation {
    action: "removeMark"
    /** List element to apply the mark start. */
    start: BoundaryPosition
    /** List element to apply the mark end, inclusive. */
    end: BoundaryPosition
    /** Mark to remove. */
    markType: M
}
// removeMark op per mark type; only "comment" needs attrs (to identify
// which comment to remove).
export type RemoveMarkOperation =
    | RemoveMarkOperationBase<"strong">
    | RemoveMarkOperationBase<"em">
    | (RemoveMarkOperationBase<"comment"> & {
        /** Data attributes for the mark. */
        attrs: MarkValue["comment"]
    })
    | RemoveMarkOperationBase<"link">
// Index-based input form of addMark, as supplied by an external caller
// (indices instead of CRDT boundary positions).
interface AddMarkOperationInputBase<M extends MarkType> {
    action: "addMark"
    /** Path to a list object. */
    path: OperationPath
    /** Index in the list to apply the mark start, inclusive. */
    startIndex: number
    /** Index in the list to end the mark, exclusive. */
    endIndex: number
    /** Mark to add. */
    markType: M
}
// TODO: automatically populate attrs type w/o manual enumeration
export type AddMarkOperationInput = Values<{
    [M in MarkType]: keyof Omit<MarkValue[M], "active"> extends never
        ? AddMarkOperationInputBase<M> & { attrs?: undefined }
        : AddMarkOperationInputBase<M> & {
            attrs: Required<Omit<MarkValue[M], "active">>
        }
}>
// TODO: What happens if the mark isn't active at all of the given indices?
// TODO: What happens if the indices are out of bounds?
// Index-based input form of removeMark.
interface RemoveMarkOperationInputBase<M extends MarkType> {
    action: "removeMark"
    /** Path to a list object. */
    path: OperationPath
    /** Index in the list to remove the mark, inclusive. */
    startIndex: number
    /** Index in the list to end the mark removal, exclusive. */
    endIndex: number
    /** Mark to remove. */
    markType: M
}
export type RemoveMarkOperationInput =
    | (RemoveMarkOperationInputBase<"strong"> & {
        attrs?: undefined
    })
    | (RemoveMarkOperationInputBase<"em"> & {
        attrs?: undefined
    })
    | (RemoveMarkOperationInputBase<"comment"> & {
        /** Data attributes for the mark. */
        attrs: MarkValue["comment"]
    })
    | (RemoveMarkOperationInputBase<"link"> & {
        /** Data attributes for the mark. */
        attrs?: undefined
    })
// Per-mark-type value payloads.
type CommentMarkValue = {
    id: string
}
type BooleanMarkValue = { active: boolean }
type LinkMarkValue = { url: string }
// Map of mark type -> its value payload; the Assert keeps it in sync with
// the MarkType union from the schema.
export type MarkValue = Assert<
    {
        strong: BooleanMarkValue
        em: BooleanMarkValue
        comment: CommentMarkValue
        link: LinkMarkValue
    },
    { [K in MarkType]: Record<string, unknown> }
>
// Active marks on a span; marks that allow multiple instances (comments)
// hold an array of values.
export type MarkMap = {
    [K in MarkType]?: Marks[K]["allowMultiple"] extends true ? Array<MarkValue[K]> : MarkValue[K]
}
export type FormatSpan = {
    marks: MarkMap
    start: number
}
/**
 * As we walk through the document applying the operation, we keep track of whether we've reached the right area.
 */
type MarkOpState = "BEFORE" | "DURING" | "AFTER"
/** A patch which only has a start index and not an end index yet.
 * Used when we're iterating thru metadata sequence and constructing a patch to emit.
 */
type PartialPatch = Omit<AddMarkOperationInput, "endIndex"> | Omit<RemoveMarkOperationInput, "endIndex">
/**
 * Apply an addMark/removeMark operation to a list's metadata, mutating the
 * per-position mark-op sets in place, and return the patches that describe
 * the resulting formatting changes in terms of visible indices.
 */
export function applyAddRemoveMark(op: MarkOperation, object: Json, metadata: ListMetadata): Patch[] {
    if (!(metadata instanceof Array)) {
        throw new Error(`Expected list metadata for a list`)
    }
    if (!(object instanceof Array)) {
        // Message fixed: this check is about the list content, not metadata.
        throw new Error(`Expected a list object`)
    }
    // we shall build a list of patches to return
    const patches: Patch[] = []
    // Make an ordered list of all the document positions, walking from left to right
    type Positions = [number, MarkOpsPosition, ListItemMetadata][]
    const positions = Array.from(metadata.entries(), ([i, elMeta]) => [
        [i, "markOpsBefore", elMeta],
        [i, "markOpsAfter", elMeta]
    ]).flat() as Positions;
    // set up some initial counters which will keep track of the state of the document.
    // these are explained where they're used.
    let visibleIndex = 0
    let currentOps = new Set<MarkOperation>()
    let opState: MarkOpState = "BEFORE"
    let partialPatch: PartialPatch | undefined
    const objLength = object.length as number // pvh wonders: why does this not account for deleted items?
    for (const [, side, elMeta] of positions) {
        // First we update the currently known formatting operations affecting this position
        currentOps = elMeta[side] || currentOps
        // The leading semicolon is deliberate: the original relied on ASI to
        // separate `let changedOps` from a line starting with `[`, which is
        // fragile; the annotation also removes an implicit `any`.
        let changedOps: Set<MarkOperation> | undefined
        ;[opState, changedOps] = calculateOpsForPosition(op, currentOps, side, elMeta, opState)
        if (changedOps) { elMeta[side] = changedOps }
        // Next we need to do patch maintenance.
        // Once we are DURING the operation, we'll start a patch, emitting an intermediate patch
        // any time the formatting changes during that range, and eventually emitting one last patch
        // at the end of the range (or document.)
        if (side === "markOpsAfter" && !elMeta.deleted) {
            // We need to keep track of the "visible" index, since the outside world won't know about
            // deleted characters.
            visibleIndex += 1
        }
        if (changedOps) {
            // First see if we need to emit a new patch, which occurs when formatting changes
            // within the range of characters the formatting operation targets.
            if (partialPatch) {
                const patch = finishPartialPatch(partialPatch, visibleIndex, objLength)
                if (patch) { patches.push(patch) }
                partialPatch = undefined
            }
            // Now begin a new patch since we have new formatting to send out.
            if (opState == "DURING" && !isEqual(opsToMarks(currentOps), opsToMarks(changedOps))) {
                partialPatch = beginPartialPatch(op, visibleIndex)
            }
        }
        if (opState == "AFTER") { break }
    }
    // If we have a partial patch leftover at the end, emit it
    if (partialPatch) {
        const patch = finishPartialPatch(partialPatch, visibleIndex, objLength)
        if (patch) { patches.push(patch) }
    }
    return patches
}
/** Given the current position in the left-to-right metadata walk, decide
 * whether `op` starts or ends at this slot and, if so, return the updated
 * op set that should be stored there, together with the new walk state.
 * Returns [state, undefined] when nothing changes at this position.
 */
function calculateOpsForPosition(
    op: MarkOperation, currentOps: Set<MarkOperation>,
    side: MarkOpsPosition,
    elMeta: ListItemMetadata,
    opState: MarkOpState): [opState: MarkOpState, newOps?: Set<MarkOperation>] {
    // Translate the metadata slot name into the boundary side it represents.
    const boundarySide = side === "markOpsAfter" ? "after" : "before"
    if (op.start.type === boundarySide && op.start.elemId === elMeta.elemId) {
        // The operation begins at this slot: add it to the active set.
        return ["DURING", new Set([...currentOps, op])]
    }
    if (op.end.type === boundarySide && op.end.elemId === elMeta.elemId) {
        // The operation ends at this slot: drop it from the active set.
        const remaining = [...currentOps].filter(other => other !== op)
        return ["AFTER", new Set(remaining)]
    }
    if (opState == "DURING" && elMeta[side] !== undefined) {
        // Some other formatting boundary occurs mid-operation: make sure the
        // op is recorded in this slot's stored set as well.
        return ["DURING", new Set([...currentOps, op])]
    }
    // No change at this position.
    return [opState, undefined]
}
/** Start building a patch at `startIndex` for the given mark operation;
 * the end index is filled in later by finishPartialPatch. Only link and
 * comment addMark ops carry attrs through to the patch.
 */
function beginPartialPatch(
    op: MarkOperation,
    startIndex: number
): PartialPatch {
    const patchInProgress: PartialPatch = {
        action: op.action,
        markType: op.markType,
        path: [Micromerge.contentKey],
        startIndex,
    }
    if (op.action === "addMark" && (op.markType === "link" || op.markType === "comment")) {
        patchInProgress.attrs = op.attrs
    }
    return patchInProgress
}
/** Complete a partial patch with an end index (clamped to the visible
 * length). Returns undefined for patches that make sense internally but
 * would be meaningless to an external caller:
 * - zero-width patches affecting no visible characters
 * - patches starting past the end of the currently visible text
 */
function finishPartialPatch(partialPatch: PartialPatch, endIndex: number, length: number): Patch | undefined {
    const hasWidth = endIndex > partialPatch.startIndex
    const touchesVisibleText = partialPatch.startIndex < length
    if (!hasWidth || !touchesVisibleText) {
        return undefined
    }
    const clampedEnd = Math.min(endIndex, length)
    return { ...partialPatch, endIndex: clampedEnd } as AddMarkOperationInput | RemoveMarkOperationInput
}
/** Given a set of mark operations for a span, produce a
 * mark map reflecting the effects of those operations.
 * (The ops can be in arbitrary order and the result is always
 * the same, because we do op ID comparisons.)
 */
// PVH code comment
// we could radically simplify this by storing opId separately,
// giving em/strong a boolean attrs and treating equality as key/attr equality
// might be worth doing for the AM implementation
export function opsToMarks(ops: Set<MarkOperation>): MarkMap {
    const markMap: MarkMap = {}
    // Partial: it starts empty and fills in as ops are visited. (The
    // original annotated this as a total Record, which `{}` does not
    // satisfy under strict checking.)
    const opIdMap: Partial<Record<MarkType, OperationId>> = {}
    // Construct a mark map which stores op IDs
    for (const op of ops) {
        const existingOpId = opIdMap[op.markType]
        // To ensure convergence, we don't always apply the operation to the mark map.
        // It only gets applied if its opID is greater than the previous op that
        // affected that value
        if (!markSpec[op.markType].allowMultiple) {
            if (existingOpId === undefined || compareOpIds(op.opId, existingOpId) === 1) {
                opIdMap[op.markType] = op.opId
                if (op.action === "addMark") {
                    markMap[op.markType] = op.attrs || { active: true }
                }
                else {
                    delete markMap[op.markType]
                }
            }
        } else {
            if (op.action === "addMark" && !markMap[op.markType]?.find(c => c.id === op.attrs.id)) {
                // Keeping the comments in ID-sorted order helps make equality checks easier later
                // because we can just check mark maps for deep equality
                markMap[op.markType] = sortBy([...(markMap[op.markType] || []), op.attrs], c => c.id)
            } else if (op.action === "removeMark") {
                markMap[op.markType] = (markMap[op.markType] || []).filter(c => c.id !== op.attrs.id)
            }
        }
    }
    return markMap
}
/** Marks active at `index` in the sequence, derived from the closest
 * mark-op set to the left of the character's "before" slot. */
export function getActiveMarksAtIndex(metadata: ListMetadata, index: number): MarkMap {
    const nearestOps = findClosestMarkOpsToLeft({ metadata, index, side: "before" })
    return opsToMarks(nearestOps)
}
/** Given a path to somewhere in the document, return a list of format spans w/ text.
 * Each span specifies the formatting marks as well as the text within the span.
 * (This function avoids the need for a caller to manually stitch together
 * format spans with a text string.)
 */
export function getTextWithFormatting(text: Json, metadata: ListMetadata): Array<FormatSpanWithText> {
    // Conveniently print out the metadata array, useful for debugging
    // console.log(
    //     inspect(
    //         {
    //             actorId: this.actorId,
    //             metadata: metadata?.map((item: ListItemMetadata, index: number) => ({
    //                 char: text[index],
    //                 before: item.markOpsBefore,
    //                 after: item.markOpsAfter,
    //             })),
    //         },
    //         false,
    //         4,
    //     ),
    // )
    // XXX: should i pass in the objectId for this?
    // NOTE(review): `${"objectId".toString()}` below is a leftover placeholder
    // that prints the literal word "objectId" — presumably a real object ID was
    // intended once it is passed in.
    if (text === undefined || !(text instanceof Array)) {
        throw new Error(`Expected a list at object ID ${"objectId".toString()}`)
    }
    if (metadata === undefined || !(metadata instanceof Array)) {
        throw new Error(`Expected list metadata for object ID ${"objectId".toString()}`)
    }
    const spans: FormatSpanWithText[] = []
    // Characters accumulated for the span currently being built, and the
    // marks applying to them.
    let characters: string[] = []
    let marks: MarkMap = {}
    // Index into `text`, which only contains visible (non-deleted) chars.
    let visible = 0
    for (const [index, elMeta] of metadata.entries()) {
        let newMarks: MarkMap | undefined
        // Figure out if new formatting became active in the gap before this character:
        // either on the "before" set of this character, or the "after" of previous character.
        // The "before" of this character takes precedence because it's later in the sequence.
        if (elMeta.markOpsBefore) {
            newMarks = opsToMarks(elMeta.markOpsBefore)
        } else if (index > 0 && metadata[index - 1].markOpsAfter) {
            newMarks = opsToMarks(metadata[index - 1].markOpsAfter!)
        }
        if (newMarks !== undefined) {
            // If we have some characters to emit, need to add to formatted spans
            addCharactersToSpans({ characters, spans, marks })
            characters = []
            marks = newMarks
        }
        if (!elMeta.deleted) {
            // todo: what happens if the char isn't a string?
            characters.push(text[visible] as string)
            visible += 1
        }
    }
    // Flush the final in-progress span.
    addCharactersToSpans({ characters, spans, marks })
    return spans
}
// Given a position before or after a character in a list, returns the set of
// mark operations closest to the left of that position in the metadata.
// - The search excludes the passed-in position itself, so metadata stored at
//   that exact position is never returned.
// - Always returns a fresh Set (a clone) to avoid shared-reference bugs.
// - If no mark operations exist between the beginning of the sequence and
//   this position, an empty Set is returned.
function findClosestMarkOpsToLeft(args: {
    index: number
    side: "before" | "after"
    metadata: ListMetadata
}): Set<MarkOperation> {
    const { index, side, metadata } = args
    // When starting after a character, that same character's "before" slot is
    // the nearest candidate to the left.
    if (side === "after") {
        const ownBefore = metadata[index].markOpsBefore
        if (ownBefore !== undefined) {
            return new Set(ownBefore)
        }
    }
    // Walk leftwards over the preceding characters, checking each one's
    // "after" slot first, then its "before" slot.
    for (let i = index - 1; i >= 0; i--) {
        const { markOpsAfter, markOpsBefore } = metadata[i]
        if (markOpsAfter !== undefined) {
            return new Set(markOpsAfter)
        }
        if (markOpsBefore !== undefined) {
            return new Set(markOpsBefore)
        }
    }
    return new Set<MarkOperation>()
}
/** Add some characters with given marks to the end of a list of spans.
 * If the marks equal the last span's marks, the characters are merged into
 * that span; otherwise a new span is appended. No-op for empty input.
 */
export function addCharactersToSpans(args: {
    characters: string[]
    marks: MarkMap
    spans: FormatSpanWithText[]
}): void {
    const { characters, marks, spans } = args
    if (characters.length === 0) {
        return
    }
    const text = characters.join("")
    // Direct last-element access instead of the original's repeated
    // `spans.slice(-1)[0]`, which allocated a throwaway array three times.
    const lastSpan = spans[spans.length - 1]
    // If the new marks are same as the previous span, we can just
    // add the new characters to the last span
    if (lastSpan !== undefined && isEqual(lastSpan.marks, marks)) {
        lastSpan.text = lastSpan.text + text
    } else {
        // Otherwise we create a new span with the characters
        spans.push({ text, marks })
    }
}
// TODO: what's up with these return types?
/** Translate an index-based add/removeMark input into an internal mark
 * operation with CRDT boundary positions (minus the opId, which the caller
 * assigns). The boundary attachment depends on whether the span's ends
 * should grow when text is inserted at them.
 */
export function changeMark(
    inputOp: AddMarkOperationInput | RemoveMarkOperationInput,
    objId: ObjectId,
    meta: ListMetadata,
    obj: Json[] | (Json[] & Record<string, Json>)): DistributiveOmit<AddMarkOperation | RemoveMarkOperation, "opId"> {
    const { action, startIndex, endIndex, markType, attrs } = inputOp
    // TODO: factor this out to a proper per-mark-type config object somewhere
    const startGrows = false
    const endGrows = markSpec[inputOp.markType].inclusive
    let start: BoundaryPosition
    let end: BoundaryPosition
    /**
     *  [start]---["H"]---["e"]---["y"]---[end]
     *  |       |       |       |       |       |       |       |
     *  SA      0B      0A      1B      1A      2B      2A      EB
     *
     * Spans that grow attach to the next/preceding position, sometimes
     * on a different character, so if a span ends on character 1 "e" but should
     * expand if new text is inserted, we actually attach the end of the span to
     * character 2's "before" slot.
     */
    if (startGrows && inputOp.startIndex == 0) {
        start = { type: "startOfText" }
    } else if (startGrows) {
        start = { type: "after", elemId: getListElementId(meta, startIndex - 1) }
    } else {
        start = { type: "before", elemId: getListElementId(meta, startIndex) }
    }
    if (endGrows && inputOp.endIndex >= obj.length) {
        end = { type: "endOfText" }
    } else if (endGrows) {
        end = { type: "before", elemId: getListElementId(meta, endIndex) }
    } else {
        // Non-growing end: attach after the last covered character
        // (endIndex is exclusive, hence the - 1).
        end = { type: "after", elemId: getListElementId(meta, endIndex - 1) }
    }
    const partialOp: DistributiveOmit<AddMarkOperation | RemoveMarkOperation, "opId"> = { action, obj: objId, start, end, markType, ...(attrs) && { attrs } }
    return partialOp
}
/**
 * Controls the splash ("About") window of the 3DCityDB Web Map Client:
 * injects an About tab into the Cesium navigation-help popup, and
 * opens/closes the splash window itself. Whether the window shows on start
 * is persisted via the "ignoreSplashWindow" cookie.
 */
class SplashController {
    // View model holding the splash window's `url` and `showOnStart` state.
    private _addSplashWindowModel: any;
    public constructor(addSplashWindowModel: any) {
        this._addSplashWindowModel = addSplashWindowModel;
    }
    // Insert the info button in the cesium navigation help popup (the question mark button)
    public insertSplashInfoHelp(): void {
        // Insert info button
        let cesiumNavHelp: any = document.getElementsByClassName("cesium-navigation-help")[0];
        cesiumNavHelp.style.width = "270px";
        // Shrink the two built-in tabs (Mouse / Touch) to make room for a
        // third "About" tab at one third width each.
        let mouseButton: any = document.getElementsByClassName("cesium-navigation-button cesium-navigation-button-left")[0];
        mouseButton.classList.add("cesium-navigation-button-custom");
        mouseButton.style.width = "33.33%";
        let touchButton: any = document.getElementsByClassName("cesium-navigation-button cesium-navigation-button-right")[0];
        touchButton.classList.add("cesium-navigation-button-custom");
        touchButton.style.width = "33.33%";
        touchButton.style.borderRadius = "0 0 0 0";
        let infoButton: any = document.createElement("BUTTON");
        infoButton.type = "button";
        infoButton.className = "cesium-navigation-button cesium-navigation-button-right cesium-navigation-button-unselected";
        infoButton.style.width = "33.33%";
        infoButton.style.filter = "none";
        let infoImage: any = document.createElement("IMG");
        infoImage.src = "images/Info.svg";
        infoImage.className = "cesium-navigation-button-icon";
        infoImage.style = "width: 25px; height: 25px; filter: none;";
        infoButton.appendChild(infoImage);
        infoButton.appendChild(document.createTextNode('About'));
        touchButton.parentNode.insertBefore(infoButton, touchButton.nextSibling);
        // Insert contents
        //var wrapper = document.getElementsByClassName("cesium-navigationHelpButton-wrapper")[0];
        let container: any = document.getElementsByClassName("cesium-navigation-help")[0];
        let contents: any = document.createElement("DIV");
        contents.className = 'cesium-navigation-help-instructions';
        contents.style.display = "none";
        contents.innerHTML = '\
            <div class="cesium-navigation-help-zoom" style="padding: 15px 5px 20px 5px; text-align: center;">3DCityDB Web Map Client</div>\
            <hr width="50%" style="margin-top: -10px; border-color: grey;">\
            <div class="cesium-navigation-help-details" style="padding: 5px; text-align: center;">This tool employs the JavaScript library <a href="https://cesiumjs.org/" target="_blank">CesiumJS</a> and is a part of the</div>\
            <table>\
            <tr>\
            <td><img src="' + 'images/3DCityDB_Logo.png' + '" width="76" height="81" /></td>\
            <td>\
            <!-- <div class="cesium-navigation-help-pan">Chair of Geoinformatics</div>\ -->\
            <div class="cesium-navigation-help-details"><a href="https://www.3dcitydb.org/3dcitydb/" target="_blank">3D City Database</a> (3DCityDB) <br> Software Suite.</div>\
            </td>\
            </tr>\
            </table>\
            \
            <div class="cesium-navigation-help-details" style="padding: 5px 5px 5px 5px; text-align: center;">Developed and maintained by:</div>\
            <table>\
            <tr>\
            <td><img src="' + 'images/TUM_Logo.svg' + '" width="76" height="40" /></td>\
            <td>\
            <!-- <div class="cesium-navigation-help-pan">Chair of Geoinformatics</div>\ -->\
            <div class="cesium-navigation-help-details"><a href="https://www.gis.bgu.tum.de/en/home/" target="_blank">TUM, Chair of Geoinformatics</a></div>\
            </td>\
            </tr>\
            </table>\
            <div class="cesium-navigation-help-zoom" style="padding: 5px 5px 5px 5px; text-align: center;">\
            <button class="cesium-button" style="font-size: medium; padding: 10px 15px 10px 15px; text-align: center;" onclick="splashController.addSplashWindow(jQuery)">Show splash window</button>\
            </div>';
        container.appendChild(contents);
        // Handle switching between the three tabs: each click handler
        // deselects the other two buttons, selects its own, shows its
        // contents and hides the others'.
        infoButton.onclick = function () {
            // Unselect mouse button
            mouseButton.classList.remove('cesium-navigation-button-selected');
            mouseButton.classList.add('cesium-navigation-button-unselected');
            // Unselect touch button
            touchButton.classList.remove('cesium-navigation-button-selected');
            touchButton.classList.add('cesium-navigation-button-unselected');
            // Select info button
            infoButton.classList.remove('cesium-navigation-button-unselected');
            infoButton.classList.add('cesium-navigation-button-selected');
            contents.style.display = "block";
            // Hide mouse contents
            let mouseContents = document.getElementsByClassName("cesium-click-navigation-help")[0];
            mouseContents.classList.remove("cesium-click-navigation-help-visible");
            // Hide touch contents
            let touchContents = document.getElementsByClassName("cesium-touch-navigation-help")[0];
            touchContents.classList.remove("cesium-touch-navigation-help-visible");
        }
        mouseButton.onclick = function () {
            // Unselect info button
            infoButton.classList.remove('cesium-navigation-button-selected');
            infoButton.classList.add('cesium-navigation-button-unselected');
            // Unselect touch button
            touchButton.classList.remove('cesium-navigation-button-selected');
            touchButton.classList.add('cesium-navigation-button-unselected');
            // Select mouse button
            mouseButton.classList.remove('cesium-navigation-button-unselected');
            mouseButton.classList.add('cesium-navigation-button-selected');
            document.getElementsByClassName("cesium-click-navigation-help cesium-navigation-help-instructions")[0].classList.add("cesium-click-navigation-help-visible");
            // Hide touch contents
            let touchContents = document.getElementsByClassName("cesium-touch-navigation-help")[0];
            touchContents.classList.remove("cesium-touch-navigation-help-visible");
            // Hide info contents
            contents.style.display = "none";
        }
        touchButton.onclick = function () {
            // Unselect info button
            infoButton.classList.remove('cesium-navigation-button-selected');
            infoButton.classList.add('cesium-navigation-button-unselected');
            // Unselect mouse button
            mouseButton.classList.remove('cesium-navigation-button-selected');
            mouseButton.classList.add('cesium-navigation-button-unselected');
            // Select touch button
            touchButton.classList.remove('cesium-navigation-button-unselected');
            touchButton.classList.add('cesium-navigation-button-selected');
            document.getElementsByClassName("cesium-touch-navigation-help cesium-navigation-help-instructions")[0].classList.add("cesium-touch-navigation-help-visible");
            // Hide mouse contents
            let mouseContents = document.getElementsByClassName("cesium-click-navigation-help")[0];
            mouseContents.classList.remove("cesium-click-navigation-help-visible");
            // Hide info contents
            contents.style.display = "none";
        }
        // Source: https://stackoverflow.com/questions/2705583/how-to-simulate-a-click-with-javascript
        // Dispatch a synthetic event; the fireEvent branch covers legacy IE.
        function eventFire(el, etype) {
            if (el.fireEvent) {
                el.fireEvent('on' + etype);
            } else {
                let evObj = document.createEvent('Events');
                evObj.initEvent(etype, true, false);
                el.dispatchEvent(evObj);
            }
        }
        // Show info in the help popup by default
        eventFire(infoButton, 'click');
    }
    // Point the splash iframe at the configured URL, persist the
    // "ignore" cookie derived from showOnStart, then open the window.
    public addSplashWindow(jQuery: any): void {
        let splashIframe: any = document.getElementById("splashwindow_iframe_content");
        splashIframe.src = this._addSplashWindowModel.url;
        // showOnStart may be stored as a boolean or the string "false".
        this.setCookie("ignoreSplashWindow", (this._addSplashWindowModel.showOnStart == "false" || this._addSplashWindowModel.showOnStart == false) + "");
        // show splash window now
        this.openSplashWindow(jQuery);
    }
    // Clear the splash configuration and close the window.
    public removeSplashWindow(jQuery: any): void {
        let splashIframe: any = document.getElementById("splashwindow_iframe_content");
        // NOTE(review): assigning undefined to iframe.src coerces to the
        // string "undefined"; removeAttribute('src') or "" is likely meant.
        splashIframe.src = undefined;
        // NOTE(review): this writes the literal cookie value "undefined" —
        // confirm whether deleting the cookie was intended instead.
        this.setCookie("ignoreSplashWindow", undefined);
        this._addSplashWindowModel.url = "";
        this._addSplashWindowModel.showOnStart = "";
        // close splash window now
        this.closeSplashWindow(jQuery);
    }
    // "Don't show again": remember the choice for 14 days and close.
    public ignoreSplashWindow(jQuery: any): void {
        this.createCookie("ignoreSplashWindow", "true", 14);
        let showOnstartCheckbox: any = document.getElementById("showOnStart_checkbox");
        showOnstartCheckbox.checked = false;
        this.closeSplashWindow(jQuery);
    }
    // Hide splash window and unblur all elements
    public closeSplashWindow(jQuery: any): void {
        let splashWindowIframe: any = document.getElementById("splashwindow_iframe");
        splashWindowIframe.style.display = 'none';
        let splashScreenButton: any = document.getElementsByClassName("splashscreen-buttons")[0];
        splashScreenButton.style.display = 'none';
        (function ($) {
            $('*').css({
                '-webkit-filter': 'none',
                '-moz-filter': 'none',
                '-o-filter': 'none',
                '-ms-filter': 'none',
                'filter': 'none',
            });
        })(jQuery);
    }
    // Open splash window and blur all elements but the splash window
    public openSplashWindow(jQuery: any): void {
        let splashWindowIframe: any = document.getElementById("splashwindow_iframe");
        splashWindowIframe.style.display = 'block';
        let splashScreenButton: any = document.getElementsByClassName("splashscreen-buttons")[0];
        splashScreenButton.style.display = 'block';
        (function ($) {
            $('body>*:not(#splashwindow_iframe):not(.splashscreen-buttons)').css("filter", "blur(3px)");
        })(jQuery);
    }
    // Resolve the splash window configuration from (in priority order) the
    // "splashWindow" URL parameter, the ignoreSplashWindow cookie, and the
    // defaults, then open or keep closed accordingly.
    public getSplashWindowFromUrl(url: string, urlController: UrlController, jQuery: any, CitydbUtil: any, Cesium: any): void {
        let tmp_url = "";
        let tmp_showOnStart = "";
        let default_url = this.getDefaultAddSplashWindowModel().url;
        let default_showOnStart = this.getDefaultAddSplashWindowModel().showOnStart;
        let ignoreSplashWindow_cookie = this.getCookie("ignoreSplashWindow");
        let splashWindowConfigString = urlController.getUrlParaValue('splashWindow', url, CitydbUtil);
        if (splashWindowConfigString) {
            let splashWindowConfig = Cesium.queryToObject(Object.keys(Cesium.queryToObject(splashWindowConfigString))[0]);
            tmp_url = (typeof splashWindowConfig.url === "undefined" || splashWindowConfig.url === "") ? default_url : splashWindowConfig.url;
            // if this page has already been visited and has the cookie ignoreSplashWindow, then priortize this cookie before the URL string parameter showOnStart
            if (typeof ignoreSplashWindow_cookie === "undefined" || ignoreSplashWindow_cookie == "") {
                tmp_showOnStart = (typeof splashWindowConfig.showOnStart === "undefined" || splashWindowConfig.showOnStart === "") ? default_showOnStart : splashWindowConfig.showOnStart;
            } else {
                tmp_showOnStart = (ignoreSplashWindow_cookie == "false") + "";
            }
        } else {
            tmp_url = default_url;
            if (typeof ignoreSplashWindow_cookie === "undefined" || ignoreSplashWindow_cookie == "") {
                tmp_showOnStart = default_showOnStart;
                ignoreSplashWindow_cookie = (default_showOnStart == "false") + "";
            } else {
                tmp_showOnStart = (ignoreSplashWindow_cookie == "false") + "";
            }
        }
        this._addSplashWindowModel.url = (this._addSplashWindowModel.url ? this._addSplashWindowModel.url : tmp_url);
        this._addSplashWindowModel.showOnStart = tmp_showOnStart;
        let showOnStartCheckbox: any = document.getElementById("showOnStart_checkbox");
        showOnStartCheckbox.checked = (tmp_showOnStart == "true");
        ignoreSplashWindow_cookie = (tmp_showOnStart == "false") + "";
        if (ignoreSplashWindow_cookie === "true") {
            this.closeSplashWindow(jQuery);
        } else {
            this.addSplashWindow(jQuery);
        }
    }
    // Fallback configuration when neither URL parameter nor cookie is set.
    public getDefaultAddSplashWindowModel(): any {
        return {
            url: "splash/SplashWindow.html",
            showOnStart: "true"
        }
    }
    // Set a session-scoped cookie (no expiry).
    public setCookie(c_name, value): void {
        this.createCookie(c_name, value, null);
    }
    // Source: https://stackoverflow.com/questions/4825683/how-do-i-create-and-read-a-value-from-cookie
    // Writes a cookie valid for `days` (session-scoped when days is falsy).
    public createCookie(name, value, days): void {
        var expires;
        if (days) {
            var date = new Date();
            date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000));
            expires = "; expires=" + date.toUTCString();
        } else {
            expires = "";
        }
        document.cookie = name + "=" + value + expires + "; path=/";
    }
    // Read a cookie by name; returns "" when not present.
    public getCookie(c_name): string {
        if (document.cookie.length > 0) {
            let c_start = document.cookie.indexOf(c_name + "=");
            if (c_start != -1) {
                c_start = c_start + c_name.length + 1;
                let c_end = document.cookie.indexOf(";", c_start);
                if (c_end == -1) {
                    c_end = document.cookie.length;
                }
                // NOTE(review): unescape() is deprecated — decodeURIComponent
                // is the modern replacement; confirm before changing behavior.
                return unescape(document.cookie.substring(c_start, c_end));
            }
        }
        return "";
    }
}
import { Component, ElementRef, OnDestroy, OnInit, ViewChild } from "@angular/core";
import { animate, state, style, transition, trigger } from "@angular/animations";
import { AbstractMenuPageComponent } from "~/abstract-menu-page-component";
import { AR, ARDebugLevel, ARMaterial, ARPlaneTappedEventData, ARPosition, ARNodeInteraction } from "nativescript-ar";
import { RouterExtensions } from "nativescript-angular";
import { PluginInfo } from "~/shared/plugin-info";
import { PluginInfoWrapper } from "~/shared/plugin-info-wrapper";
import { PropertyChangeData } from "tns-core-modules/data/observable";
import { Color } from "tns-core-modules/color";
import { AppComponent } from "~/app.component";
import { keepAwake, allowSleepAgain } from "nativescript-insomnia";
const flashlight = require("nativescript-flashlight");
/**
 * AR demo page: checks device support, detects planes via the nativescript-ar
 * plugin and places the model chosen in the dropdown wherever the user taps a
 * detected plane. Also demos the flashlight and insomnia plugins.
 */
@Component({
  selector: "page-ar",
  moduleId: module.id,
  templateUrl: "./ar.component.html",
  styleUrls: ["ar-common.css"],
  animations: [
    trigger("flyInOut", [
      state("in", style({transform: "scale(1)", opacity: 1})),
      transition("void => *", [
        style({transform: "scale(0.9)", opacity: 0}),
        animate("1000ms 100ms ease-out")
      ])
    ]),
    trigger("from-right", [
      state("in", style({
        "opacity": 1,
        transform: "translate(0)"
      })),
      state("void", style({
        "opacity": 0,
        transform: "translate(20%)"
      })),
      transition("void => *", [animate("600ms 1500ms ease-out")])
    ])
  ]
})
export class ARComponent extends AbstractMenuPageComponent implements OnInit, OnDestroy {
  private ar: AR;
  // NOTE(review): only ever written (here and in reset()), never read —
  // presumably meant to one-shot the hint on first plane; confirm before removing.
  private firstPlaneDetected: boolean = false;
  hint: string;
  planesVisible: boolean = true;
  planeDetectionActive: boolean = true;
  statsEnabled: boolean = true;
  flashlightActive: boolean = false;
  isSupported: boolean;
  debugLevel: ARDebugLevel = ARDebugLevel.FEATURE_POINTS;
  // order must match the selectedModelIndex dispatch in planeTapped()
  models: Array<string> = ["Box", "Sphere", "Tube", "Car", "Ball", "Tree"];
  selectedModelIndex = 0;
  // All these are valid plane materials:
  // planeMaterial = "Assets.scnassets/Materials/tron/tron-diffuse.png";
  // planeMaterial = new Color("red");
  planeMaterial = <ARMaterial>{
    diffuse: new Color("white"),
    transparency: 0.2
  };
  @ViewChild("dropDown", {static: false}) dropDown: ElementRef;
  constructor(protected appComponent: AppComponent,
              protected routerExtensions: RouterExtensions) {
    super(appComponent, routerExtensions);
    this.hint = "Search for planes by pointing at a surface";
  }
  /** Checks AR support, restyles the iOS dropdown picker, keeps the screen awake. */
  ngOnInit(): void {
    this.isSupported = AR.isSupported();
    // if this is false on a modern iOS 11 device, rebuild in Xcode
    if (!this.isSupported) {
      this.hint = "THIS DEVICE DOESN'T SUPPORT AR ☹️";
    }
    // modify the background color of the DropDown picker
    const drop: any = this.dropDown.nativeElement;
    if (this.dropDown.nativeElement.ios) {
      // iOS-only branch: reaches into the native UIPickerView backing the dropdown
      const pickerView: UIPickerView = drop._listPicker;
      pickerView.backgroundColor = new Color("#444").ios;
    }
    keepAwake().then(() => console.log("Insomnia is now ON"));
  }
  /** Lets the screen dim/lock again once the page is destroyed. */
  ngOnDestroy(): void {
    allowSleepAgain().then(() => console.log("Insomnia is now OFF"));
  }
  /** Switch handler: toggles plane detection and the feature-point debug overlay. */
  togglePlaneDetection(args: PropertyChangeData): void {
    if (args.value !== null && args.value !== this.planeDetectionActive) {
      this.planeDetectionActive = args.value;
      this.ar.togglePlaneDetection(this.planeDetectionActive);
      this.debugLevel = this.planeDetectionActive ? ARDebugLevel.FEATURE_POINTS : ARDebugLevel.NONE;
    }
  }
  /** Switch handler: turns the device torch on/off via nativescript-flashlight. */
  toggleFlashlight(args: PropertyChangeData): void {
    if (args.value !== null && args.value !== this.flashlightActive) {
      this.flashlightActive = args.value;
      this.flashlightActive ? flashlight.on() : flashlight.off();
    }
  }
  /** Switch handler: shows/hides detected planes (and the debug overlay with them). */
  togglePlaneVisibility(args: PropertyChangeData): void {
    if (args.value !== null && args.value !== this.planesVisible) {
      this.planesVisible = args.value;
      this.ar.togglePlaneVisibility(this.planesVisible);
      this.debugLevel = this.planesVisible ? ARDebugLevel.FEATURE_POINTS : ARDebugLevel.NONE;
    }
  }
  /** Switch handler: toggles the renderer statistics overlay. */
  toggleStats(args: PropertyChangeData): void {
    if (args.value !== null && args.value !== this.statsEnabled) {
      this.statsEnabled = args.value;
      this.ar.toggleStatistics(this.statsEnabled);
    }
  }
  /** Removes all placed nodes and restores the initial hint text. */
  reset(): void {
    if (this.ar) {
      this.ar.reset();
    }
    this.firstPlaneDetected = false;
    this.hint = "Search for planes by scanning a surface";
  }
  /** 'arLoaded' event: keeps a handle to the AR view for later API calls. */
  arLoaded(args): void {
    this.ar = args.object;
  }
  planeDetected(args): void {
    console.log("Plane detected @ " + new Date().getTime());
  }
  /** Dispatches the tap to the model-adder matching the dropdown selection. */
  planeTapped(args: ARPlaneTappedEventData): void {
    this.hint = `Tapped at ${args.position.x} y ${args.position.y} z ${args.position.z}`;
    if (this.selectedModelIndex === 0) {
      this.addBox(args.position);
    } else if (this.selectedModelIndex === 1) {
      this.addSphere(args.position);
    } else if (this.selectedModelIndex === 2) {
      this.addTube(args.position);
    } else if (this.selectedModelIndex === 3) {
      this.addCar(args.position);
    } else if (this.selectedModelIndex === 4) {
      this.addBall(args.position);
    } else if (this.selectedModelIndex === 5) {
      this.addTree(args.position);
    }
  }
  /** Drops a textured box slightly above the tap point; long-press removes it. */
  private addBox(position: ARPosition): void {
    this.ar.addBox({
      materials: [{
        diffuse: {
          contents: "Assets.scnassets/Materials/tnsgranite/tnsgranite-diffuse.png",
          wrapMode: "ClampToBorder"
        }
      }],
      position: {x: position.x, y: position.y + 0.8, z: position.z},
      dimensions: 0.15,
      mass: 1,
      onTap: ((model: ARNodeInteraction) => {
        console.log("Box was tapped");
      }),
      onLongPress: ((model: ARNodeInteraction) => {
        model.node.remove();
      })
    }).then(arNode => {
      console.log("Box successfully added");
      if (arNode.ios) {
        // do something iOS specific here if you like
      }
    });
  }
  /** Drops a multi-material sphere above the tap point; long-press removes it. */
  private addSphere(position: ARPosition): void {
    this.ar.addSphere({
      materials: [{
        diffuse: new Color("red"),
        normal: new Color("blue"),
        roughness: new Color("green"),
        specular: new Color("yellow"),
        metalness: new Color("purple"),
        transparency: 0.9
      }],
      position: {x: position.x, y: position.y + 1.3, z: position.z},
      radius: 0.2,
      mass: 0.01,
      onTap: ((model: ARNodeInteraction) => {
        console.log("Sphere was tapped");
      }),
      onLongPress: ((model: ARNodeInteraction) => {
        model.node.remove();
      })
    }).then(arNode => {
      console.log("Sphere successfully added at " + JSON.stringify(arNode.position));
    });
  }
  /** Drops a granite-textured tube above the tap point; long-press removes it. */
  private addTube(position: ARPosition): void {
    this.ar.addTube({
      // TODO a tube also has 4 surfaces we can use different materials for (https://developer.apple.com/documentation/scenekit/scntube?language=objc)
      // TODO a youtube texture :P
      materials: [{
        diffuse: {
          contents: "Assets.scnassets/Materials/tnsgranite/tnsgranite-diffuse.png",
          wrapMode: "Repeat" // which is the default
        },
        roughness: "Assets.scnassets/Materials/tnsgranite/tnsgranite-roughness.png",
        transparency: 1 // solid (which is the default)
      }],
      position: {x: position.x, y: position.y + 0.3, z: position.z},
      innerRadius: 0.2,
      outerRadius: 0.35,
      radialSegmentCount: 1000,
      height: 0.7,
      mass: 8,
      onTap: ((model: ARNodeInteraction) => {
        console.log("Tube was tapped");
      }),
      onLongPress: ((model: ARNodeInteraction) => {
        model.node.remove();
      })
    }).then(arNode => {
      console.log("Tube successfully added");
    });
  }
  /** Drops a Ball .dae model that removes itself two seconds after placement. */
  private addBall(position: ARPosition): void {
    this.ar.addModel({
      name: "Models.scnassets/Ball.dae",
      position: {x: position.x, y: position.y + 1.3, z: position.z},
      scale: 0.08,
      mass: 0.2,
      onTap: ((model: ARNodeInteraction) => {
        console.log("Ball was tapped");
      }),
      onLongPress: ((model: ARNodeInteraction) => {
        model.node.remove();
      })
    }).then(arNode => {
      // to remove balls after a few seconds you can do this:
      setTimeout(() => {
        arNode.remove();
      }, 2000);
    });
  }
  /** Places a heavy Car .dae model just above the tapped plane. */
  private addCar(position: ARPosition): void {
    this.ar.addModel({
      name: "Models.scnassets/Car.dae",
      position: {x: position.x, y: position.y + 0.06, z: position.z},
      scale: 0.75,
      mass: 100,
      onTap: ((model: ARNodeInteraction) => {
        console.log("Car was tapped");
      }),
      onLongPress: ((model: ARNodeInteraction) => {
        model.node.remove();
      })
    });
  }
  /** Places a Tree .dae model (specific child node of the scene) at the tap point. */
  private addTree(position: ARPosition): void {
    this.ar.addModel({
      name: "Models.scnassets/Tree.dae",
      childNodeName: "Tree_lp_11",
      position: position,
      scale: 0.01,
      mass: 0.0002,
      onTap: ((model: ARNodeInteraction) => {
        console.log("Tree was tapped");
      }),
      onLongPress: ((model: ARNodeInteraction) => {
        model.node.remove();
      })
    });
  }
  /** Analytics screen name for the base class. */
  protected getScreenName(): string {
    return "AR";
  }
  /** Describes the plugins showcased on this page for the info popup. */
  protected getPluginInfo(): PluginInfoWrapper {
    return new PluginInfoWrapper(
        "Is normal reality too dull for you? Augment it with the nativescript-ar plugin!",
        Array.of(
            new PluginInfo(
                "nativescript-ar",
                "AR 👀",
                "https://github.com/EddyVerbruggen/nativescript-ar",
                "Proof of Concept of an AR plugin. Currently supporting ARKit (iOS), and in the future ARCore (Android) as well."
            ),
            new PluginInfo(
                "nativescript-drop-down",
                "DropDown",
                "https://github.com/PeterStaev/NativeScript-Drop-Down",
                "The DropDown displays items from which the user can select one. If the built-in ActionSheet is not to your liking, give this one a try!"
            ),
            new PluginInfo(
                "nativescript-flashlight",
                "Flashlight 🔦",
                "https://github.com/tjvantoll/nativescript-flashlight/",
                "Use the device torch in your app!"
            ),
            new PluginInfo(
                "nativescript-insomnia",
                "Insomnia 😪",
                "https://github.com/EddyVerbruggen/nativescript-insomnia",
                "Keep the device awake (not dim the screen, lock, etc). Useful if the user needs to see stuff on the device but doesn't touch it."
            )
        )
    );
  }
}
import { Component, OnInit, OnDestroy, ViewChild } from '@angular/core';
import { environment } from './../../../../../environments/environment';
import { ActivatedRoute, Router } from '@angular/router';
import { Subscription } from 'rxjs/Subscription';
import { UtilsService } from '../../../../shared/services/utils.service';
import { LoggerService } from '../../../../shared/services/logger.service';
import { ErrorHandlingService } from '../../../../shared/services/error-handling.service';
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/pairwise';
import * as _ from "lodash";
import { RefactorFieldsService } from './../../../../shared/services/refactor-fields.service';
import { WorkflowService } from '../../../../core/services/workflow.service';
import { RouterUtilityService } from '../../../../shared/services/router-utility.service';
import { AdminService } from '../../../services/all-admin.service';
import { NgForm } from '@angular/forms';
import { SelectComponent , SelectItem} from 'ng2-select';
import { UploadFileService } from '../../../services/upload-file-service';
@Component({
selector: 'app-admin-update-rule',
templateUrl: './update-rule.component.html',
styleUrls: ['./update-rule.component.css'],
providers: [
LoggerService,
ErrorHandlingService,
UploadFileService,
AdminService
]
})
export class UpdateRuleComponent implements OnInit, OnDestroy {
@ViewChild('targetType') targetTypeSelectComponent: SelectComponent;
// @ViewChild('ruleFrequencyMonthDay') ruleFrequencyMonthDayComponent: SelectComponent;
@ViewChild('ruleFrequency') ruleFrequencyComponent: SelectComponent;
@ViewChild('ruleSeverity') ruleSeverityComponent: SelectComponent;
@ViewChild('ruleCategory') ruleCategoryComponent: SelectComponent;
ruleFrequencyMonth: any;
ruleFrequencyDay: any;
ruleFrequencyMonths: any;
ruleFrequencyDays: any;
ruleFrequencyModeValue: any;
ruleDisplayName: String = '';
weekName: any;
ruleRestUrl: any;
alexaKeywords: any;
assetGroup: any = [];
pageTitle: String = 'Update Rule';
allPolicies: any = [];
breadcrumbArray: any = ['Admin', 'Rules'];
breadcrumbLinks: any = ['policies', 'rules'];
breadcrumbPresent: any;
outerArr: any = [];
dataLoaded: boolean = false;
errorMessage: any;
showingArr: any = ['policyName', 'policyId', 'policyDesc'];
allColumns: any = [];
totalRows: number = 0;
currentBucket: any = [];
bucketNumber: number = 0;
firstPaginator: number = 1;
lastPaginator: number;
currentPointer: number = 0;
seekdata: boolean = false;
showLoader: boolean = true;
allMonthDays: any = [];
allEnvironments: any = [];
allRuleParams: any = [];
hideContent: boolean = true;
isRuleUpdationFailed: boolean = false;
isRuleUpdationSuccess: boolean = false;
ruleLoader: boolean = false;
ruleContentLoader: boolean = true;
ruleDetails: any = { ruleId: '', policyId: '', ruleName: '', assetGroup: '', dataSource: '', targetType: '' };
ruleFrequency: any = []
paginatorSize: number = 25;
isLastPage: boolean;
isFirstPage: boolean;
totalPages: number;
pageNumber: number = 0;
searchTxt: String = '';
dataTableData: any = [];
initVals: any = [];
tableDataLoaded: boolean = false;
filters: any = [];
searchCriteria: any;
filterText: any = {};
ruleId = '';
errorValue: number = 0;
showGenericMessage: boolean = false;
dataTableDesc: String = '';
urlID: String = '';
FullQueryParams: any;
queryParamsWithoutFilter: any;
urlToRedirect: any = '';
mandatory: any;
allRuleParamKeys: any = [];
allEnvParamKeys: any = [];
activePolicy: any = [];
parametersInput: any = { ruleKey: '', ruleValue: '', envKey: '', envValue: '' };
allAlexaKeywords: any = [];
assetGroupNames: any = [];
datasourceDetails: any = [];
targetTypesNames: any = [];
ruleCategories = [];
ruleSeverities = ["critical","high","medium","low"];
allPolicyIds: any = [];
allFrequencies: any = ['Daily', 'Hourly', 'Minutes', 'Monthly', 'Weekly', 'Yearly'];
allMonths: any = [
{ text: 'January', id: 0 },
{ text: 'February', id: 1 },
{ text: 'March', id: 2 },
{ text: 'April', id: 3 },
{ text: 'May', id: 4 },
{ text: 'June', id: 5 },
{ text: 'July', id: 6 },
{ text: 'August', id: 7 },
{ text: 'September', id: 8 },
{ text: 'October', id: 9 },
{ text: 'November', id: 10 },
{ text: 'December', id: 11 }
];
isAlexaKeywordValid: any = -1;
ruleJarFile: any;
currentFileUpload: File;
selectedFiles: FileList;
ruleType: any = 'Classic';
selectedFrequency: any = '';
selectedSeverity:any = '';
selectedCategory:any = '';
ruleJarFileName: any = '';
selectedPolicyId: any = '';
selectedTargetType: any = '';
isAutofixEnabled: boolean = false;
isFileChanged: boolean = false;
public labels: any;
private previousUrl: any = '';
private pageLevel = 0;
public backButtonRequired;
private routeSubscription: Subscription;
private getKeywords: Subscription;
private previousUrlSubscription: Subscription;
private downloadSubscription: Subscription;
  // Wires the injected routing/logging/upload/admin services, then immediately
  // reads the route parameters (ruleId etc.) and starts loading all form data.
  constructor(
    private activatedRoute: ActivatedRoute,
    private router: Router,
    private utils: UtilsService,
    private logger: LoggerService,
    private errorHandling: ErrorHandlingService,
    private uploadService: UploadFileService,
    private refactorFieldsService: RefactorFieldsService,
    private workflowService: WorkflowService,
    private routerUtilityService: RouterUtilityService,
    private adminService: AdminService
  ) {
    this.routerParam();
    this.updateComponent();
  }
ngOnInit() {
this.urlToRedirect = this.router.routerState.snapshot.url;
this.breadcrumbPresent = 'Update Rule';
this.backButtonRequired = this.workflowService.checkIfFlowExistsCurrently(
this.pageLevel
);
}
dataMarshalling(dataToMarshall) {
let fullPolicies = [];
for (let index = 0; index < dataToMarshall.length; index++) {
let policyItem = {};
policyItem['createdDate'] = dataToMarshall[index][0];
policyItem['modifiedDate'] = dataToMarshall[index][1];
policyItem['resolution'] = dataToMarshall[index][2];
policyItem['policyDesc'] = dataToMarshall[index][3];
policyItem['policyId'] = dataToMarshall[index][4];
policyItem['policyUrl'] = dataToMarshall[index][5];
policyItem['policyVersion'] = dataToMarshall[index][6];
policyItem['policyName'] = dataToMarshall[index][7];
policyItem['numberOfRules'] = dataToMarshall[index][8];
fullPolicies.push(policyItem);
}
return fullPolicies;
}
nextPage() {
try {
if (!this.isLastPage) {
this.pageNumber++;
this.showLoader = true;
}
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
this.logger.log('error', error);
}
}
prevPage() {
try {
if (!this.isFirstPage) {
this.pageNumber--;
this.showLoader = true;
}
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
this.logger.log('error', error);
}
}
getAlexaKeywords() {
let url = environment.allAlexaKeywords.url;
let method = environment.allAlexaKeywords.method;
this.adminService.executeHttpAction(url, method, {}, {}).subscribe(reponse => {
this.showLoader = false;
this.allAlexaKeywords = reponse[0];
this.getRuleCategoryDetails();
},
error => {
this.allAlexaKeywords = [];
this.errorMessage = 'apiResponseError';
this.showLoader = false;
});
}
getDatasourceDetails() {
let url = environment.datasourceDetails.url;
let method = environment.datasourceDetails.method;
this.adminService.executeHttpAction(url, method, {}, {}).subscribe(reponse => {
this.showLoader = false;
let fullDatasourceNames = [];
for (let index = 0; index < reponse[0].length; index++) {
let datasourceDetail = reponse[0][index];
fullDatasourceNames.push(datasourceDetail[0]);
}
this.datasourceDetails = fullDatasourceNames;
},
error => {
this.datasourceDetails = [];
this.errorMessage = 'apiResponseError';
this.showLoader = false;
});
}
getRuleCategoryDetails() {
const url = environment.ruleCategory.url;
const method = environment.ruleCategory.method;
this.adminService.executeHttpAction(url, method, {}, {}).subscribe(reponse => {
const categories = [];
for (let index = 0; index < reponse[0].length; index++) {
const categoryDetail = reponse[0][index];
categories.push(categoryDetail.ruleCategory);
}
this.ruleCategories = categories;
this.showLoader = false;
},
error => {
this.ruleCategories = [];
this.errorMessage = 'apiResponseError';
this.showLoader = false;
});
}
getAllAssetGroupNames() {
let url = environment.assetGroupNames.url;
let method = environment.assetGroupNames.method;
this.adminService.executeHttpAction(url, method, {}, {}).subscribe(reponse => {
this.showLoader = false;
this.assetGroupNames = reponse[0];
},
error => {
this.assetGroupNames = [];
this.errorMessage = 'apiResponseError';
this.showLoader = false;
});
}
  /**
   * Form submit handler: hides the form, shows the progress spinner and kicks
   * off the actual update request with the raw form values.
   */
  updateRule(form: NgForm) {
    this.hideContent = true;
    this.ruleLoader = true;
    this.buildAndUpdateRuleModel(form.value);
  }
private buildAndUpdateRuleModel(ruleForm) {
let newRuleModel = Object();
newRuleModel.assetGroup = ruleForm.assetGroup[0].text;
newRuleModel.ruleId = this.FullQueryParams.ruleId;
newRuleModel.assetGroup = ruleForm.assetGroup[0].text;
newRuleModel.alexaKeyword = ruleForm.alexaKeywords;
newRuleModel.ruleFrequency = this.buildRuleFrequencyCronJob(ruleForm);
newRuleModel.ruleExecutable = this.ruleJarFileName;
newRuleModel.ruleRestUrl = this.getRuleRestUrl(ruleForm);
newRuleModel.ruleType = ruleForm.ruleType;
newRuleModel.isFileChanged = this.isFileChanged;
newRuleModel.displayName = ruleForm.ruleDisplayName;
newRuleModel.ruleParams = this.buildRuleParams();
newRuleModel.isAutofixEnabled = ruleForm.isAutofixEnabled;
newRuleModel.severity = this.selectedSeverity;
newRuleModel.category = this.selectedCategory;
if (this.isFileChanged) {
this.currentFileUpload = this.selectedFiles.item(0);
} else {
this.currentFileUpload = null;
}
let url = environment.updateRule.url;
let method = environment.updateRule.method;
this.uploadService.pushFileToStorage(url, method, this.currentFileUpload, newRuleModel).subscribe(event => {
this.ruleLoader = false;
this.isRuleUpdationSuccess = true;
},
error => {
this.isRuleUpdationFailed = true;
this.ruleLoader = false;
})
}
private buildRuleParams() {
let ruleParms = Object();
ruleParms.params = this.allRuleParams;
ruleParms.environmentVariables = this.allEnvironments;
return JSON.stringify(ruleParms);
}
private getRuleRestUrl(ruleForm) {
let ruleType = ruleForm.ruleType;
if (ruleType === 'Serverless') {
return ruleForm.ruleRestUrl;
} else {
return '';
}
}
private buildRuleFrequencyCronJob(ruleForm) {
let selectedFrequencyType = ruleForm.ruleFrequency[0].text;
let cronDetails = Object();
cronDetails.interval = selectedFrequencyType;
if (selectedFrequencyType === 'Yearly') {
cronDetails.day = ruleForm.ruleFrequencyDay[0].id;
cronDetails.month = (ruleForm.ruleFrequencyMonth[0].id + 1);
} else if (selectedFrequencyType === 'Monthly') {
cronDetails.duration = parseInt(ruleForm.ruleFrequencyMonths);
cronDetails.day = parseInt(ruleForm.ruleFrequencyDays);
} else if (selectedFrequencyType === 'Weekly') {
cronDetails.week = ruleForm.weekName;
} else {
cronDetails.duration = parseInt(ruleForm.ruleFrequencyModeValue);
}
return this.generateExpression(cronDetails);
}
private generateExpression(cronDetails) {
let getCronExpression = function (cronObj) {
if (cronObj === undefined || cronObj === null) {
return undefined;
} else {
let cronObjFields = ['minutes', 'hours', 'dayOfMonth', 'month', 'dayOfWeek', 'year'];
let cronExpression = cronObj.minutes;
for (let index = 1; index < cronObjFields.length; index++) {
cronExpression = cronExpression + ' ' + cronObj[cronObjFields[index]];
}
return cronExpression;
}
};
let isValid = function (cronValidity) {
if (cronValidity.minutes && cronValidity.hours && cronValidity.dayOfMonth && cronValidity.month && cronValidity.dayOfWeek && cronValidity.year) {
return true;
}
return false;
};
let cronObj = {};
if (cronDetails.interval == 'Minutes') {
cronObj = {
minutes: '0/' + cronDetails.duration,
hours: '*',
dayOfMonth: '*',
month: '*',
dayOfWeek: '?',
year: '*'
};
} else if (cronDetails.interval == 'Hourly') {
cronObj = {
minutes: '0',
hours: '0/' + cronDetails.duration,
dayOfMonth: '*',
month: '*',
dayOfWeek: '?',
year: '*'
};
} else if (cronDetails.interval == 'Daily') {
cronObj = {
minutes: '0',
hours: '0',
dayOfMonth: '1/' + cronDetails.duration,
month: '*',
dayOfWeek: '?',
year: '*'
};
} else if (cronDetails.interval == 'Weekly') {
cronObj = {
minutes: '0',
hours: '0',
dayOfMonth: '?',
month: '*',
dayOfWeek: cronDetails.week,
year: '*'
};
} else if (cronDetails.interval == 'Monthly') {
cronObj = {
minutes: '0',
hours: '0',
dayOfMonth: cronDetails.day,
month: '1/' + cronDetails.duration,
dayOfWeek: '?',
year: '*'
};
} else if (cronDetails.interval == 'Yearly') {
cronObj = {
minutes: '0',
hours: '0',
dayOfMonth: cronDetails.day,
month: cronDetails.month,
dayOfWeek: '?',
year: '*'
};
}
return getCronExpression(cronObj);
};
closeErrorMessage() {
this.isRuleUpdationFailed = false;
this.hideContent = false;
}
onJarFileChange(event) {
this.selectedFiles = event.target.files;
this.ruleJarFileName = this.selectedFiles[0].name;
let extension = this.ruleJarFileName.substring(this.ruleJarFileName.lastIndexOf(".")+1);
if(extension!=='jar') {
this.removeJarFileName();
}
this.isFileChanged = true;
}
removeJarFileName() {
this.ruleJarFileName = '';
this.ruleJarFile = '';
this.isFileChanged = true;
}
openJarFileBrowser(event) {
let element: HTMLElement = document.getElementById('selectJarFile') as HTMLElement;
element.click();
}
getTargetTypeNamesByDatasourceName(datasourceName) {
let url = environment.targetTypesByDatasource.url;
let method = environment.targetTypesByDatasource.method;
this.adminService.executeHttpAction(url, method, {}, { dataSourceName: datasourceName }).subscribe(reponse => {
this.showLoader = false;
this.targetTypesNames = reponse[0];
if (this.targetTypesNames.length > 0) {
this.targetTypeSelectComponent.disabled = false;
this.targetTypeSelectComponent.placeholder = 'Select Target Type';
} else {
this.targetTypeSelectComponent.placeholder = 'No Target Available';
}
},
error => {
this.allPolicyIds = [];
this.errorMessage = 'apiResponseError';
this.showLoader = false;
});
}
getAllPolicyIds() {
let url = environment.allPolicyIds.url;
let method = environment.allPolicyIds.method;
this.adminService.executeHttpAction(url, method, {}, {}).subscribe(reponse => {
this.showLoader = false;
this.allPolicyIds = reponse[0];
},
error => {
this.allPolicyIds = [];
this.errorMessage = 'apiResponseError';
this.showLoader = false;
});
}
  /**
   * Loads the rule being edited and prefills every form control: params and
   * env vars (severity/category are pulled out of the params list into their
   * own dropdowns), asset group, executable or REST url depending on rule
   * type, and the cron frequency decoded back into dropdown values.
   */
  getRuleDetails() {
    let url = environment.getRuleById.url;
    let method = environment.getRuleById.method;
    let ruleId: string = this.FullQueryParams.ruleId;
    this.adminService.executeHttpAction(url, method, {}, { ruleId: ruleId }).subscribe(reponse => {
      // reset all derived state before repopulating from the response
      this.allRuleParamKeys = [];
      this.allEnvParamKeys = [];
      this.ruleDetails = reponse[0];
      let ruleParams = Object();
      this.ruleDetails.dataSource = 'N/A';
      this.allEnvironments = [];
      this.allRuleParams = [];
      this.isAutofixEnabled = false;
      this.ruleDisplayName = this.ruleDetails.displayName;
      // ruleParams is stored server-side as a JSON string
      ruleParams = JSON.parse(this.ruleDetails.ruleParams);
      if (ruleParams.hasOwnProperty('pac_ds')) {
        this.ruleDetails.dataSource = ruleParams.pac_ds;
      }
      if (ruleParams.hasOwnProperty('environmentVariables')) {
        this.allEnvironments = ruleParams.environmentVariables;
        this.allEnvParamKeys = _.map(ruleParams.environmentVariables, 'key');
      }
      if (ruleParams.hasOwnProperty('params')) {
        if (ruleParams.params instanceof Array) {
          // iterate backwards so splicing severity/category rows is index-safe
          for (let i = ruleParams.params.length - 1; i >= 0; i -= 1) {
            if(ruleParams.params[i].key == 'severity') {
              this.selectedSeverity = ruleParams.params[i].value;
              this.ruleSeverityComponent.active.push(new SelectItem(ruleParams.params[i].value));
              ruleParams.params.splice(i,1);
            } else if(ruleParams.params[i].key == 'ruleCategory') {
              this.selectedCategory = ruleParams.params[i].value;
              this.ruleCategoryComponent.active.push(new SelectItem(ruleParams.params[i].value));
              ruleParams.params.splice(i,1);
            }
          }
          this.allRuleParams = ruleParams.params;
          this.allRuleParamKeys = _.map(ruleParams.params, 'key');
        }
      }
      if (ruleParams.hasOwnProperty('autofix')) {
        this.isAutofixEnabled = ruleParams.autofix;
      }
      this.ruleType = this.ruleDetails.ruleType;
      this.alexaKeywords = this.ruleDetails.alexaKeyword;
      // the rule's own keyword is always considered valid
      this.isAlexaKeywordValid = 1;
      if (this.ruleDetails.assetGroup !== '') {
        this.assetGroup = [{ 'text': this.ruleDetails.assetGroup, 'id': this.ruleDetails.assetGroup }];;
      }
      if (this.ruleType === 'Classic') {
        this.ruleJarFileName = this.ruleDetails.ruleExecutable;
      } else if (this.ruleType === 'Serverless') {
        this.ruleRestUrl = this.ruleDetails.ruleRestUrl;
      }
      // turn the stored cron expression back into dropdown-friendly values
      let frequencyforEdit = this.decodeCronExpression(this.ruleDetails.ruleFrequency);
      this.ruleFrequency = [{ 'text': frequencyforEdit.interval, 'id': frequencyforEdit.interval }];
      this.onSelectFrequency(frequencyforEdit.interval);
      if (frequencyforEdit.interval.toLowerCase() === 'yearly') {
        this.ruleFrequencyDay = [{ text: frequencyforEdit.day, id: frequencyforEdit.day }];
        this.ruleFrequencyMonth = [this.allMonths[parseInt(frequencyforEdit.month) - 1]];
      } else if (frequencyforEdit.interval.toLowerCase() === 'monthly') {
        this.ruleFrequencyMonths = frequencyforEdit.duration;
        this.ruleFrequencyDays = frequencyforEdit.day;
      } else if (frequencyforEdit.interval.toLowerCase() === 'weekly') {
        this.weekName = frequencyforEdit.week;
      } else {
        this.ruleFrequencyModeValue = frequencyforEdit.duration;
      }
      this.hideContent = false;
      this.ruleContentLoader = false;
    },
      error => {
        this.errorMessage = 'apiResponseError';
        this.showLoader = false;
      });
  }
public onSelectDatasource(datasourceName: any): void {
this.targetTypeSelectComponent.items = [];
this.targetTypeSelectComponent.disabled = true;
if (this.targetTypeSelectComponent.active) {
this.targetTypeSelectComponent.active.length = 0;
}
this.getTargetTypeNamesByDatasourceName(datasourceName.text);
}
checkForDuration(cronValue) {
let arr = cronValue.split('/');
if (arr.length > 1) {
return arr[1];
} else {
return arr[0];
}
}
checkIfCronValueIsForMonthly(cronValue) {
let arr = cronValue.split('/');
if (arr.length > 1) {
return true;
} else {
return false;
}
}
checkForSpecialCharactersInCron(cronValue) {
return (cronValue != '*' && cronValue != '?' && cronValue != '0')
}
  /**
   * Reverse of generateExpression(): inspects a 6-field cron expression and
   * recovers { interval, duration, day, month, week } for the form controls.
   * Scans the fields left-to-right; the first "significant" field (not '*',
   * '?' or '0') decides the interval: index 0 -> Minutes, 1 -> Hourly,
   * 2 -> Daily (or the day part of Monthly/Yearly), 3 -> Monthly or Yearly,
   * 4 -> Weekly.
   */
  decodeCronExpression(expression) {
    let intervals = ['Minutes', 'Hourly', 'Daily', 'Monthly', 'Weekly', 'Yearly'];
    let decodedObject = {
      day: '',
      duration: '',
      interval: '',
      month: '',
      week: ''
    };
    let expressionSplitArr = expression.split(' ');
    for (let i = 0; i < expressionSplitArr.length; i++) {
      if (i === 0 || i === 1) {
        // minutes / hours field: a step here means Minutes or Hourly interval
        if (this.checkForSpecialCharactersInCron(expressionSplitArr[i])) {
          decodedObject.duration = this.checkForDuration(expressionSplitArr[i]);
          decodedObject.interval = intervals[i];
          break;
        }
      } else if (i === 2) {
        // dayOfMonth field: significant either as Daily step, or as the day
        // part of a Monthly/Yearly schedule (decided by the month field next)
        if (this.checkForSpecialCharactersInCron(expressionSplitArr[i])) {
          let j = i + 1;
          let monthlyExpressionValue = this.checkForSpecialCharactersInCron(expressionSplitArr[j]);
          if (monthlyExpressionValue) {
            decodedObject.day = this.checkForDuration(expressionSplitArr[i]);
          } else {
            decodedObject.duration = this.checkForDuration(expressionSplitArr[i]);
            decodedObject.interval = intervals[i];
            break;
          }
        }
      } else if (i === 3) {
        // month field: a "/" step means Monthly, a plain value means Yearly
        if (this.checkForSpecialCharactersInCron(expressionSplitArr[i])) {
          if (this.checkIfCronValueIsForMonthly(expressionSplitArr[i])) {
            decodedObject.duration = this.checkForDuration(expressionSplitArr[i]);
            decodedObject.interval = intervals[i];
          } else {
            decodedObject.month = this.checkForDuration(expressionSplitArr[i]);
            decodedObject.interval = intervals[intervals.length - 1];
          }
          break;
        }
      } else if (i === 4) {
        // dayOfWeek field: significant only for Weekly schedules
        if (this.checkForSpecialCharactersInCron(expressionSplitArr[i])) {
          decodedObject.week = this.checkForDuration(expressionSplitArr[i]);
          decodedObject.interval = intervals[i];
          break;
        }
      }
    }
    return decodedObject;
  }
addEnvironmentParameters(parametersInput: any, isEncrypted: any) {
if (parametersInput.envKey !== '' && parametersInput.envValue !== '') {
this.allEnvironments.push({ key: parametersInput.envKey.trim(), value: parametersInput.envValue.trim(), isValueNew: true, encrypt: isEncrypted.checked });
this.allEnvParamKeys.push(parametersInput.envKey.trim());
parametersInput.envKey = '';
parametersInput.envValue = '';
isEncrypted.checked = false;
}
}
addRuleParameters(parametersInput: any, isEncrypted: any) {
if (parametersInput.ruleKey !== '' && parametersInput.ruleValue !== '') {
this.allRuleParams.push({ key: parametersInput.ruleKey.trim(), value: parametersInput.ruleValue.trim(), isValueNew: true, encrypt: isEncrypted.checked });
this.allRuleParamKeys.push(parametersInput.ruleKey.trim());
parametersInput.ruleKey = '';
parametersInput.ruleValue = '';
isEncrypted.checked = false;
}
}
removeRuleParameters(index: number): void {
this.allRuleParamKeys.splice(index, 1);
this.allRuleParams.splice(index, 1);
}
removeEnvironmentParameters(index: number): void {
this.allEnvParamKeys.splice(index, 1);
this.allEnvironments.splice(index, 1);
}
isAlexaKeywordAvailable(alexaKeyword) {
if (alexaKeyword.length == 0) {
this.isAlexaKeywordValid = -1;
} else {
if (alexaKeyword.toLowerCase() === this.ruleDetails.alexaKeyword.toLowerCase()) {
this.isAlexaKeywordValid = 1;
} else {
let isKeywordExits = this.allAlexaKeywords.findIndex(item => alexaKeyword.toLowerCase() === item.toLowerCase());
if (isKeywordExits === -1) {
this.isAlexaKeywordValid = 1;
} else {
this.isAlexaKeywordValid = 0;
}
}
}
}
  /** Tracks the policy chosen in the policy-id dropdown. */
  onSelectPolicyId(policyId: any) {
    this.selectedPolicyId = policyId.text;
  }
  /** Tracks the target type chosen in the target-type dropdown. */
  onSelectTargetType(targetType: any) {
    this.selectedTargetType = targetType.text;
  }
  /** Tracks the frequency interval chosen (also called when prefilled from getRuleDetails). */
  onSelectFrequency(selectedFrequency) {
    this.selectedFrequency = selectedFrequency;
  }
  // No-op select handler; presumably kept only to satisfy the template
  // binding for the day dropdown — confirm before removing.
  onSelectFrequencyMonthDay(selectedMonthDay) {
  }
  /** Tracks the severity chosen in the severity dropdown. */
  onSelectSeverity(selectedSeverity) {
    this.selectedSeverity = selectedSeverity;
  }
  /** Tracks the category chosen in the category dropdown. */
  onSelectCategory(selectedCategory) {
    this.selectedCategory = selectedCategory;
  }
onSelectFrequencyMonth(selectedMonth) {
let monthDays: any = [];
let daysCount = this.getNumberOfDays(selectedMonth.id);
for (let dayNo = 1; dayNo <= daysCount; dayNo++) {
monthDays.push({ id: dayNo, text: dayNo.toString() });
}
this.allMonthDays = monthDays;
}
private getNumberOfDays = function (month) {
let year = new Date().getFullYear();
let isLeap = ((year % 4) == 0 && ((year % 100) != 0 || (year % 400) == 0));
return [31, (isLeap ? 29 : 28), 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month];
}
  /** Fires all the independent lookups needed to populate the update form. */
  getData() {
    this.getAllPolicyIds();
    this.getAlexaKeywords();
    this.getDatasourceDetails();
    this.getAllAssetGroupNames();
    this.getRuleDetails();
  }
  /*
   * Reads the URL parameters and query object; depending on their contents,
   * different APIs are hit with different query parameters.
   */
  routerParam() {
    try {
      // this.filterText saves the queryparam
      // snapshot of every query parameter across the whole route tree
      let currentQueryParams = this.routerUtilityService.getQueryParametersFromSnapshot(this.router.routerState.snapshot.root);
      if (currentQueryParams) {
        this.FullQueryParams = currentQueryParams;
        this.ruleId = this.FullQueryParams.ruleId;
        // deep copy so deleting 'filter' below doesn't mutate FullQueryParams
        this.queryParamsWithoutFilter = JSON.parse(JSON.stringify(this.FullQueryParams));
        delete this.queryParamsWithoutFilter['filter'];
        /**
         * The below code is added to get URLparameter and queryparameter
         * when the page loads ,only then this function runs and hits the api with the
         * filterText obj processed through processFilterObj function
         */
        this.filterText = this.utils.processFilterObj(
          this.FullQueryParams
        );
        this.urlID = this.FullQueryParams.TypeAsset;
        //check for mandatory filters.
        if (this.FullQueryParams.mandatory) {
          this.mandatory = this.FullQueryParams.mandatory;
        }
      }
    } catch (error) {
      this.errorMessage = this.errorHandling.handleJavascriptError(error);
      this.logger.log('error', error);
    }
  }
  /**
   * Calls the keyword service before initializing the filter array so that
   * the filter key names are refreshed, then reloads all page data.
   */
updateComponent() {
this.outerArr = [];
this.searchTxt = '';
this.currentBucket = [];
this.bucketNumber = 0;
this.firstPaginator = 1;
this.showLoader = true;
this.currentPointer = 0;
this.dataTableData = [];
this.tableDataLoaded = false;
this.dataLoaded = false;
this.seekdata = false;
this.errorValue = 0;
this.showGenericMessage = false;
this.getData();
}
  /** Returns to the previously opened page recorded on the workflow stack. */
  navigateBack() {
    try {
      this.workflowService.goBackToLastOpenedPageAndUpdateLevel(this.router.routerState.snapshot.root);
    } catch (error) {
      this.logger.log('error', error);
    }
  }
massageData(data) {
let refactoredService = this.refactorFieldsService;
let newData = [];
let formattedFilters = data.map(function (data) {
let keysTobeChanged = Object.keys(data);
let newObj = {};
keysTobeChanged.forEach(element => {
let elementnew =
refactoredService.getDisplayNameForAKey(
element
) || element;
newObj = Object.assign(newObj, { [elementnew]: data[element] });
});
newObj['Actions'] = '';
newData.push(newObj);
});
return newData;
}
  /**
   * Transforms API rows into the cell-object grid consumed by the data table.
   * Every cell becomes an object describing its text, link behaviour and styling;
   * the 'Actions' column renders as a pink "Edit" link.
   * Side effects: populates this.outerArr and this.allColumns, sets this.seekdata
   * when there is no data, and records errors on this.errorMessage.
   */
  processData(data) {
    try {
      let innerArr = {};
      let totalVariablesObj = {};
      let cellObj = {};
      this.outerArr = [];
      let getData = data;
      let getCols = Array();
      if (getData.length) {
        // Column names are taken from the first row's keys.
        getCols = Object.keys(getData[0]);
      } else {
        // No rows: flag the "no data" state for the template.
        this.seekdata = true;
      }
      for (let row = 0; row < getData.length; row++) {
        innerArr = {};
        for (let col = 0; col < getCols.length; col++) {
          if (getCols[col].toLowerCase() == 'actions') {
            // Actions column: clickable "Edit" link cell.
            cellObj = {
              link: true,
              properties: {
                'text-shadow': '0.33px 0',
                'color': '#ed0295'
              },
              colName: getCols[col],
              hasPreImg: false,
              valText: 'Edit',
              imgLink: '',
              text: 'Edit',
              statusProp: {
                'color': '#ed0295'
              }
            };
          } else {
            // Plain data cell: show the raw value as text.
            cellObj = {
              link: '',
              properties: {
                color: ''
              },
              colName: getCols[col],
              hasPreImg: false,
              imgLink: '',
              text: getData[row][getCols[col]],
              valText: getData[row][getCols[col]]
            };
          }
          innerArr[getCols[col]] = cellObj;
          totalVariablesObj[getCols[col]] = '';
        }
        this.outerArr.push(innerArr);
      }
      // NOTE(review): presumably guards against double-processing by keeping only
      // the second half of the accumulated rows; halfLength may be fractional,
      // which splice truncates to an integer — confirm intent.
      if (this.outerArr.length > getData.length) {
        let halfLength = this.outerArr.length / 2;
        this.outerArr = this.outerArr.splice(halfLength);
      }
      this.allColumns = Object.keys(totalVariablesObj);
      // NOTE(review): the dynamic column list above is immediately overwritten by
      // this hard-coded list — confirm whether the dynamic keys are still needed.
      this.allColumns = ['Policy Id', 'Policy Name', 'Policy Description', 'Policy Version', 'No of Rules', 'Actions'];
    } catch (error) {
      this.errorMessage = this.errorHandling.handleJavascriptError(error);
      this.logger.log('error', error);
    }
  }
  /**
   * Navigates to the create/edit policy page with no policyId query param,
   * i.e. in "create" mode. Records the current route snapshot first so the
   * user can navigate back afterwards.
   */
  goToCreatePolicy() {
    try {
      this.workflowService.addRouterSnapshotToLevel(this.router.routerState.snapshot.root);
      this.router.navigate(['../create-edit-policy'], {
        relativeTo: this.activatedRoute,
        queryParamsHandling: 'merge',
        queryParams: {
        }
      });
    } catch (error) {
      this.errorMessage = this.errorHandling.handleJavascriptError(error);
      this.logger.log('error', error);
    }
  }
goToDetails(row) {
if (row.col === 'Actions') {
try {
this.workflowService.addRouterSnapshotToLevel(this.router.routerState.snapshot.root);
this.router.navigate(['../create-edit-policy'], {
relativeTo: this.activatedRoute,
queryParamsHandling: 'merge',
queryParams: {
policyId: row.row['Policy Id'].text
}
});
} catch (error) {
this.errorMessage = this.errorHandling.handleJavascriptError(error);
this.logger.log('error', error);
}
}
}
  /** Stores the search term typed by the user. */
  searchCalled(search) {
    this.searchTxt = search;
  }
  /** Resets pagination state so the next fetch starts from the first bucket. */
  callNewSearch() {
    this.bucketNumber = 0;
    this.currentBucket = [];
    // this.getPolicyDetails();
  }
ngOnDestroy() {
try {
if (this.routeSubscription) {
this.routeSubscription.unsubscribe();
}
if (this.previousUrlSubscription) {
this.previousUrlSubscription.unsubscribe();
}
} catch (error) {
this.logger.log('error', '--- Error while unsubscribing ---');
}
}
} | the_stack |
import type { GraphQLError } from 'graphql';
import WebSocket from 'isomorphic-ws';
import { createDeferredPromise, DeferredPromise, GQLResponse } from '../utils';
import {
GQL_COMPLETE,
GQL_CONNECTION_ACK,
GQL_CONNECTION_ERROR,
GQL_CONNECTION_INIT,
GQL_CONNECTION_KEEP_ALIVE,
GQL_DATA,
GQL_ERROR,
GQL_START,
GQL_STOP,
GRAPHQL_WS,
} from './protocol';
/** Payload delivered to operation handlers: a GraphQL response or a lifecycle marker. */
export type OperationHandlerPayload = GQLResponse | 'start' | 'complete';
/** Event argument passed to every OperationCallback. */
export interface OperationCallbackArg {
  operationId: string;
  payload: OperationHandlerPayload;
}
/** Subscriber callback invoked for each event of an operation. */
export type OperationCallback = (arg: OperationCallbackArg) => void;
/** Internal bookkeeping for one in-flight GraphQL operation. */
export interface Operation {
  // True once GQL_START has been sent for this operation on the current socket.
  started: boolean;
  options: {
    query: string;
    variables?: Record<string, unknown>;
  };
  // All subscriber callbacks sharing this operation (identical subscriptions are deduplicated).
  callbacks: Set<OperationCallback>;
  // Fans a payload out to every callback in `callbacks`.
  handler: (data: OperationHandlerPayload) => void;
  extensions?: { type: string; payload: unknown }[];
}
export interface ClientOptions {
  /**
   * Should the websocket connection try to reconnect
   *
   * @default true
   */
  reconnect?: boolean;
  /**
   * Amount of reconnection attempts
   *
   * @default Infinity
   */
  maxReconnectAttempts?: number;
  /** Invoked once the server acknowledges the connection (GQL_CONNECTION_ACK). */
  connectionCallback?: () => void;
  /** Invoked with the server payload when the connection is rejected (GQL_CONNECTION_ERROR). */
  failedConnectionCallback?: (payload: unknown) => Promise<void>;
  /** Invoked when reconnection attempts are exhausted. */
  failedReconnectCallback?: () => void;
  /** Payload sent with GQL_CONNECTION_INIT; may be a value or a (possibly async) factory. */
  connectionInitPayload?:
    | (() => Promise<Record<string, unknown>> | Record<string, unknown>)
    | Record<string, unknown>;
  /** Extra HTTP headers sent with the websocket handshake. */
  headers?: Record<string, string>;
  /**
   * Controls when should the connection be established.
   *
   * `false`: Establish a connection immediately.
   *
   * `true`: Establish a connection on first subscribe and close on last unsubscribe.
   *
   * @default true
   */
  lazy?: boolean;
}
/**
 * GraphQL-over-WebSocket client implementing the legacy `graphql-ws`
 * (subscriptions-transport-ws) message flow: connection init/ack, operation
 * start/stop, keep-alive, and automatic reconnection with exponential backoff.
 * Identical subscriptions (same query + variables) are deduplicated and
 * reference-counted so they share one server-side operation.
 */
export class Client {
  // Maps a serialized {query, variables} string to its operation id (dedup key).
  subscriptionQueryMap: Record<string, string>;
  socket: WebSocket | null;
  headers;
  uri;
  // Monotonic counter used to mint operation ids.
  operationId;
  // True once the server has acknowledged the connection (GQL_CONNECTION_ACK).
  ready;
  operations: Map<string, Operation>;
  // Reference count of subscribers per operation id.
  operationsCount: Record<string | number, number>;
  tryReconnect;
  maxReconnectAttempts;
  reconnectAttempts;
  connectionCallback;
  failedConnectionCallback;
  failedReconnectCallback;
  connectionInitPayload;
  closedByUser?: boolean;
  reconnecting?: boolean;
  reconnectTimeoutId?: ReturnType<typeof setTimeout>;
  lazy;
  // Resolves (void) once the connection is acknowledged, or with an Error on close.
  connectedPromise: DeferredPromise<Error | void>;
  // Resolves true/false depending on whether the current socket opened and sent INIT.
  socketReady: DeferredPromise<boolean> | undefined;
  constructor(
    uri: string,
    {
      headers = {},
      reconnect = true,
      maxReconnectAttempts = Infinity,
      connectionCallback,
      failedConnectionCallback,
      failedReconnectCallback,
      connectionInitPayload = {},
      lazy = true,
    }: ClientOptions
  ) {
    this.uri = uri;
    this.socket = null;
    this.operationId = 0;
    this.ready = false;
    this.operations = new Map();
    this.operationsCount = {};
    this.lazy = lazy;
    this.subscriptionQueryMap = {};
    this.headers = headers;
    this.tryReconnect = reconnect;
    this.maxReconnectAttempts = maxReconnectAttempts;
    this.reconnectAttempts = 0;
    this.connectionCallback = connectionCallback;
    this.failedConnectionCallback = failedConnectionCallback;
    this.failedReconnectCallback = failedReconnectCallback;
    this.connectionInitPayload = connectionInitPayload;
    // NOTE(review): connect() runs before connectedPromise is assigned below;
    // this appears to rely on socket callbacks firing asynchronously — confirm.
    if (!lazy) this.connect();
    this.connectedPromise = createDeferredPromise();
  }
  /** Opens the websocket and wires up lifecycle handlers. No-op if a socket already exists. */
  connect() {
    if (this.socket !== null) return;
    this.socket = new WebSocket(this.uri, [GRAPHQL_WS], {
      headers: this.headers,
    });
    // Tracks whether this particular socket managed to open and send INIT.
    const readyPromise = (this.socketReady = createDeferredPromise());
    this.socket.onopen = async () => {
      if (this.socket && this.socket.readyState === WebSocket.OPEN) {
        try {
          // connectionInitPayload may be a plain value or a (possibly async) factory.
          const payload =
            typeof this.connectionInitPayload === 'function'
              ? await this.connectionInitPayload()
              : this.connectionInitPayload;
          this.sendMessage(null, GQL_CONNECTION_INIT, payload);
          readyPromise.resolve(true);
        } catch (err) {
          // Could not produce the init payload: tear down (and maybe reconnect).
          this.close(this.tryReconnect, false);
          readyPromise.resolve(false);
        }
      } else {
        readyPromise.resolve(false);
      }
    };
    this.socket.onclose = () => {
      // Server-initiated close: route through close() so reconnection can kick in.
      if (!this.closedByUser) {
        this.close(this.tryReconnect, false);
      }
      readyPromise.resolve(false);
    };
    this.socket.onerror = () => {};
    this.socket.onmessage = async ({ data }) => {
      await this.handleMessage(data.toString('utf-8'));
    };
  }
  /**
   * Closes the socket. When `tryReconnect` is true, every operation is marked
   * as not-started so it will be re-sent after the next successful connect,
   * and a reconnection attempt is scheduled.
   */
  close(tryReconnect = false, closedByUser = true) {
    this.closedByUser = closedByUser;
    this.ready = false;
    // Unblock anyone awaiting connectedPromise on the old connection.
    this.connectedPromise.resolve(Error('Socket closed!'));
    if (this.socket !== null) {
      if (closedByUser) {
        this.unsubscribeAll();
      }
      this.socket.close();
      this.socket = null;
      this.reconnecting = false;
      if (tryReconnect) {
        this.connectedPromise = createDeferredPromise();
        // Flag all operations for re-start after reconnecting.
        for (const operationId of this.operations.keys()) {
          const operation = this.operations.get(operationId);
          if (operation) {
            this.operations.set(operationId, {
              ...operation,
              started: false,
            });
          }
        }
        this.reconnect();
      }
    }
  }
  /** Exponential backoff: 100ms * 2^attempts, capped at 10 seconds. */
  getReconnectDelay() {
    const delayMs = 100 * Math.pow(2, this.reconnectAttempts);
    return Math.min(delayMs, 10000);
  }
  /** Schedules a connect() after the backoff delay, unless attempts are exhausted. */
  reconnect() {
    // NOTE(review): when a reconnect is already in flight this also invokes
    // failedReconnectCallback, and the strict `>` allows one attempt beyond
    // maxReconnectAttempts — confirm both are intended.
    if (
      this.reconnecting ||
      this.reconnectAttempts > this.maxReconnectAttempts
    ) {
      return this.failedReconnectCallback && this.failedReconnectCallback();
    }
    this.reconnectAttempts++;
    this.reconnecting = true;
    const delay = this.getReconnectDelay();
    this.reconnectTimeoutId = setTimeout(() => {
      this.connect();
    }, delay);
  }
  /**
   * Drops one subscriber from an operation. When the last subscriber leaves
   * (or `forceUnsubscribe` is set) sends GQL_STOP and, in lazy mode, closes
   * the socket after a 2s grace period if no operations remain.
   */
  async unsubscribe(operationId: string, forceUnsubscribe = false) {
    let count = this.operationsCount[operationId];
    count--;
    if (count === 0 || forceUnsubscribe) {
      this.operationsCount[operationId] = 0;
      this.operations.delete(operationId);
      await this.sendMessage(operationId, GQL_STOP, null);
      if (this.lazy) {
        const self = this;
        setTimeout(() => {
          if (self.operations.size === 0 && this.socket) {
            self.close();
          }
        }, 2000);
      }
    } else {
      this.operationsCount[operationId] = count;
    }
  }
  /** Force-unsubscribes every active operation (used on user-initiated close). */
  unsubscribeAll() {
    for (const operationId of this.operations.keys()) {
      this.unsubscribe(operationId, true).catch(console.error);
    }
  }
  /**
   * Serializes and sends one protocol frame. Waits for the socket to be ready
   * first; resolves either via the ws send callback or a 200ms fallback
   * timeout, whichever fires first (resolve is idempotent).
   */
  sendMessage(
    operationId: number | string | null,
    type: string,
    payload: unknown = {},
    extensions?: unknown
  ) {
    // NOTE(review): async Promise executor — rejections inside the await are
    // caught by the try/catch, but the pattern is fragile; confirm.
    return new Promise<void>(async (resolve, reject) => {
      try {
        if (this.socketReady) {
          const isOk = await this.socketReady.promise;
          if (!isOk) return resolve();
        }
        if (!this.socket) return resolve();
        this.socket.send(
          JSON.stringify({
            id: operationId,
            type,
            payload,
            extensions,
          }),
          (err) => {
            if (err) console.error(err);
            resolve();
          }
        );
        setTimeout(resolve, 200);
      } catch (err) {
        reject(err);
      }
    });
  }
  /**
   * Dispatches one raw protocol frame:
   * ACK → mark ready, (re)start all operations; DATA/ERROR/COMPLETE → route to
   * the operation handler; CONNECTION_ERROR → tear down; KEEP_ALIVE → ignore.
   * @throws on unparsable JSON or unknown message types
   */
  async handleMessage(message: string) {
    let data;
    let operationId;
    let operation;
    try {
      data = JSON.parse(message);
      operationId = data.id;
    } catch (e) {
      throw new Error(
        `Invalid message received: "${message}" Message must be JSON parsable.`
      );
    }
    if (operationId) {
      operation = this.operations.get(operationId);
    }
    switch (data.type) {
      case GQL_CONNECTION_ACK:
        // Connection established: resume every known operation.
        this.reconnecting = false;
        this.ready = true;
        this.reconnectAttempts = 0;
        this.connectedPromise.resolve();
        for (const operationId of this.operations.keys()) {
          this.startOperation(operationId).catch(console.error);
        }
        if (this.connectionCallback) {
          this.connectionCallback();
        }
        break;
      case GQL_DATA:
        if (operation) operation.handler(data.payload);
        break;
      case GQL_ERROR:
        // Surface the server error as a GraphQL error response, then drop the operation.
        if (operation) {
          operation.handler({
            data: null,
            errors: [{ message: data.payload } as GraphQLError],
          });
          this.operations.delete(operationId);
        }
        break;
      case GQL_COMPLETE:
        if (operation) {
          operation.handler('complete');
          this.operations.delete(operationId);
        }
        break;
      case GQL_CONNECTION_ERROR:
        this.close(this.tryReconnect, false);
        if (this.failedConnectionCallback) {
          await this.failedConnectionCallback(data.payload);
        }
        break;
      case GQL_CONNECTION_KEEP_ALIVE:
        // Heartbeat; nothing to do.
        break;
      default:
        throw new Error(`Invalid message type: "${data.type}"`);
    }
  }
  /**
   * Sends GQL_START for a not-yet-started operation once the connection is
   * acknowledged. Silently skips when the connection is not ready (the ACK
   * handler will retry all operations).
   */
  async startOperation(operationId: string) {
    try {
      await this.connectedPromise.promise;
      const operation = this.operations.get(operationId);
      if (!operation) throw Error('Operation not found, ' + operationId);
      const { started, options, extensions } = operation;
      if (!started) {
        if (!this.ready) return;
        this.operations.set(operationId, {
          ...operation,
          started: true,
        });
        await this.sendMessage(operationId, GQL_START, options, extensions);
      }
    } finally {
    }
  }
  /**
   * Registers a subscriber. Identical query+variables reuse the existing
   * operation (reference-counted); otherwise a new operation is created and
   * started. The subscriber always receives a 'start' event, after the start
   * message when a new operation was created.
   * @returns the operation id (promise when a new operation had to start)
   */
  createSubscription(
    query: string,
    variables: Record<string, unknown> | undefined,
    publish: OperationCallback,
    subscriptionString?: string
  ): Promise<string> | string {
    if (!this.socket) this.connect();
    subscriptionString ||= JSON.stringify({
      query,
      variables,
    });
    let operationId = this.subscriptionQueryMap[subscriptionString];
    let startPromise: Promise<void> | undefined;
    try {
      let existingOperation: Operation | undefined;
      if (
        operationId &&
        (existingOperation = this.operations.get(operationId))
      ) {
        // Duplicate subscription: just add the callback and bump the refcount.
        existingOperation.callbacks.add(publish);
        this.operationsCount[operationId] =
          this.operationsCount[operationId] + 1;
        return operationId;
      }
      operationId = String(++this.operationId);
      const callbacks = new Set([publish]);
      // Fans each payload out to every subscriber; one failing callback must
      // not prevent the others from being notified.
      function handler(payload: OperationHandlerPayload) {
        const event: OperationCallbackArg = {
          operationId,
          payload,
        };
        for (const cb of callbacks) {
          try {
            cb(event);
          } catch (err) {
            console.error(err);
          }
        }
      }
      const operation: Operation = {
        started: false,
        options: { query, variables },
        handler,
        callbacks,
      };
      this.operations.set(operationId, operation);
      startPromise = this.startOperation(operationId);
      this.operationsCount[operationId] = 1;
      this.subscriptionQueryMap[subscriptionString] = operationId;
      return startPromise.then(() => operationId);
    } finally {
      // Always notify the new subscriber with a 'start' event — asynchronously,
      // and after the start message when a new operation was created.
      function start() {
        setTimeout(() => {
          publish({
            operationId,
            payload: 'start',
          });
        }, 0);
      }
      if (startPromise) startPromise.then(start);
      else start();
    }
  }
}
import {strict as assert} from 'assert';
import {parallel} from 'async';
import Baobab, {Cursor} from '../../src/baobab';
import state from '../state';
describe('Cursor API', function() {
/**
* Getters
*/
describe('Getters', function() {
    /**
     * Root level: getters exercised directly on the tree (root cursor).
     */
    describe('Root cursor', function() {
      const tree = new Baobab(state);
      it('should be possible to retrieve full data.', function() {
        assert.deepEqual(tree.get(), state);
      });
      it('should be possible to retrieve nested data.', function() {
        const colors = tree.get(['one', 'subtwo', 'colors']);
        assert.deepEqual(colors, state.one.subtwo.colors);
        // Polymorphism
        const primitive = tree.get('primitive');
        assert.strictEqual(primitive, 3);
      });
      it('should be possible to get data from both maps and lists.', function() {
        const yellow = tree.get(['one', 'subtwo', 'colors', 1]);
        assert.strictEqual(yellow, 'yellow');
      });
      it('should return undefined when data is not to be found through path.', function() {
        const inexistant = tree.get(['no']);
        assert.strictEqual(inexistant, undefined);
        // Nesting
        const nestedInexistant = tree.get(['no', 'no']);
        assert.strictEqual(nestedInexistant, undefined);
      });
      it('should be possible to retrieve items using a function in path.', function() {
        const yellow = tree.get('one', 'subtwo', 'colors', (i: string) => i === 'yellow');
        assert.strictEqual(yellow, 'yellow');
      });
      it('should be possible to retrieve items with a descriptor object.', function() {
        // Descriptor objects match list items by partial deep equality.
        const firstItem = tree.get('items', {id: 'one'}),
              secondItem = tree.get('items', {id: 'two', user: {name: 'John'}});
        assert.deepEqual(firstItem, {id: 'one'});
        assert.deepEqual(secondItem, {id: 'two', user: {name: 'John', surname: 'Talbot'}});
        assert.deepEqual(firstItem, {id: 'one'});
      });
      it('should not fail when retrieved data is null on the path.', function() {
        const nullValue = tree.get('setLater');
        assert.strictEqual(nullValue, null);
        const inexistant = tree.get('setLater', 'a');
        assert.strictEqual(inexistant, undefined);
      });
      it('should throw when using an invalid path in getters.', function() {
        assert.throws(function() {
          tree.get([null, false]);
        }, /invalid/);
      });
      it('should be possible to use some projection.', function() {
        const altTree = new Baobab({
          one: 1,
          two: 2
        });
        // Projections accept both a keyed map of paths and a list of paths.
        assert.deepEqual(
          altTree.project({a: ['one'], b: ['two']}),
          {
            a: 1,
            b: 2
          }
        );
        assert.deepEqual(
          altTree.project([['one'], ['two']]),
          [1, 2]
        );
      });
      it('an unsolved dynamic cursor should get undefined.', function() {
        const cursor = tree.select('one', 'subtwo', 'colors', {id: 4});
        assert.strictEqual(cursor.solvedPath, null);
        assert.strictEqual(cursor.get(), undefined);
      });
      it('should be possible to tell whether a path exists or not.', function() {
        assert.strictEqual(tree.exists(), true);
        assert.strictEqual(tree.exists('one'), true);
        assert.strictEqual(tree.exists('three'), false);
        assert.strictEqual(tree.exists(['one', 'subtwo']), true);
        assert.strictEqual(tree.exists('one', 'subtwo'), true);
        assert.strictEqual(tree.exists('one', 'subthree'), false);
      });
      it('should be possible to assess whether an undefined value exists.', function() {
        assert.strictEqual(tree.exists('undefinedValue'), true);
        assert.strictEqual(tree.exists('setLater'), true);
      });
    });
    /**
     * Branch & leaf level: getters exercised on selected sub-cursors.
     */
    describe('Standard cursors', function() {
      const tree = new Baobab(state),
            colorCursor = tree.select(['one', 'subtwo', 'colors']),
            oneCursor = tree.select('one');
      it('should be possible to retrieve data at cursor.', function() {
        const colors = colorCursor.get();
        assert(colors instanceof Array);
        assert.deepEqual(colors, state.one.subtwo.colors);
      });
      it('should be possible to shallow clone data at cursor.', function() {
        // Shallow clone: the container is new, nested references are shared.
        const clonedData = colorCursor.clone();
        assert(clonedData !== colorCursor.get());
        assert(clonedData !== colorCursor.clone());
        assert(tree.clone().one === tree.get().one);
      });
      it('should be possible to deep clone data at cursor.', function() {
        const clonedData = tree.deepClone();
        assert(clonedData !== tree.get());
        assert(tree.deepClone().one !== tree.get().one);
      });
      it('should be possible to retrieve data with a 0 key.', function() {
        const specificTree = new Baobab([1, 2]);
        assert.strictEqual(specificTree.get(0), 1);
        assert.strictEqual(colorCursor.get(0), 'blue');
      });
      it('should be possible to retrieve nested data.', function() {
        const colors = oneCursor.get(['subtwo', 'colors']);
        assert.deepEqual(colors, state.one.subtwo.colors);
      });
      it('should be possible to use some polymorphism on the getter.', function() {
        const colors = oneCursor.get('subtwo', 'colors');
        assert.deepEqual(colors, state.one.subtwo.colors);
      });
      it('should be possible to tell whether a cursor exists or not.', function() {
        assert.strictEqual(oneCursor.exists(), true);
        assert.strictEqual(oneCursor.select('subtwo', 'colors').exists(), true);
        assert.strictEqual(oneCursor.select('subtwo').exists('colors'), true);
        assert.strictEqual(oneCursor.select('hey').exists(), false);
      });
    });
});
/**
* Setters
*/
describe('Setters', function() {
/**
* Root level
*/
describe('Root cursor', function() {
it('should be possible to set a key using a path rather than a key.', function() {
const tree = new Baobab(state, {asynchronous: false});
tree.set(['two', 'age'], 34);
assert.strictEqual(tree.get().two.age, 34);
});
it('should be possible to set a key at an nonexistent path.', function() {
const tree = new Baobab(state, {asynchronous: false});
tree.set(['nonexistent', 'key'], 'hello');
assert.strictEqual(tree.get().nonexistent.key, 'hello');
});
it('should be possible to set a key using a dynamic path.', function() {
const tree = new Baobab(state, {asynchronous: false});
tree.set(['items', {id: 'two'}, 'user', 'age'], 34);
assert.strictEqual(tree.get().items[1].user.age, 34);
});
it('should', function() {
const tree = new Baobab({
list: [{id: 10, hello: 'world'}]
}, {asynchronous: false});
tree.set(['list', {id: 10}, 'one', 'two'], 'monde');
assert.deepEqual(tree.get(), {
list: [
{
id: 10,
hello: 'world',
one: {
two: 'monde'
}
}
]
});
});
it('should fail when setting a nonexistent dynamic path.', function() {
const tree = new Baobab(state, {asynchronous: false});
assert.throws(function() {
tree.set(['items', {id: 'four'}, 'user', 'age'], 34);
}, /solve/);
});
it('should be possible to write the tree synchronously.', function(done) {
const tree = new Baobab({hello: 'John'});
tree.on('update', function() {
done();
});
assert.strictEqual(tree.get('hello'), 'John');
tree.set('hello', 'Jack');
assert.strictEqual(tree.get('hello'), 'Jack');
});
it('using an unknown operation type should throw.', function() {
const tree = new Baobab();
assert.throws(function() {
tree.update([], {type: 'shawarma', value: 'hey'});
}, /unknown/);
});
});
    /**
     * Branch & leaf level: setters exercised on selected sub-cursors.
     */
    describe('Standard cursor', function() {
      it('should warn the user when too many arguments are applied to a setter.', function() {
        const tree = new Baobab(state),
              cursor = tree.select('items');
        assert.throws(function() {
          // @ts-ignore
          cursor.set('this', 'is', 'my', 'destiny!');
        }, /too many/);
      });
      it('should throw an error when the provided path is incorrect.', function() {
        const tree = new Baobab(state),
              cursor = tree.select('items');
        assert.throws(function() {
          // @ts-ignore
          cursor.set(/test/, '45');
        }, /invalid path/);
      });
      it('should be possible to set a key using a path rather than a key.', function() {
        const tree = new Baobab(state, {asynchronous: false}),
              cursor = tree.select('items');
        cursor.set([1, 'user', 'age'], 34);
        assert.strictEqual(cursor.get()[1].user.age, 34);
      });
      it('should be possible to set a key at an nonexistent path.', function() {
        const tree = new Baobab(state, {asynchronous: false}),
              cursor = tree.select('two');
        cursor.set(['nonexistent', 'key'], 'hello');
        assert.strictEqual(cursor.get().nonexistent.key, 'hello');
      });
      it('should be possible to set a key using a dynamic path.', function() {
        const tree = new Baobab(state, {asynchronous: false}),
              cursor = tree.select('items');
        cursor.set([{id: 'two'}, 'user', 'age'], 34);
        assert.strictEqual(cursor.get()[1].user.age, 34);
      });
      it('should fail when setting a nonexistent dynamic path.', function() {
        const tree = new Baobab(state, {asynchronous: false}),
              cursor = tree.select('items');
        assert.throws(function() {
          cursor.set([{id: 'four'}, 'user', 'age'], 34);
        }, /solve/);
      });
      it('should fail consistently across possibilities when setting a nonexistent dynamic path.', function() {
        const tree = new Baobab({items: [{id: 1}]}, {asynchronous: true});
        assert.throws(function() {
          tree.set(['items', {id: 2}, 'id'], 3);
        }, /solve/);
        assert.throws(function() {
          tree.select('items', {id: 2}).set('id', 3);
        }, /solve/);
      });
      it('should support setting non-enumerable properties', function() {
        // Property descriptors (enumerable flags) must survive a write.
        const tree = new Baobab(Object.create({}, {
                id: {value: 2, writable: true, enumerable: true},
                hello: {value: 'world', writable: true, enumerable: false}
              })),
              cursor = tree.select('hello');
        cursor.set('universe');
        assert.equal(cursor.get(), 'universe');
        assert.equal((Object.getOwnPropertyDescriptor(tree.get(), 'id') || {}).enumerable, true);
        assert.equal((Object.getOwnPropertyDescriptor(tree.get(), 'hello') || {}).enumerable, false);
      });
      it('should support setting deep non-enumerable properties', function() {
        const tree = new Baobab(Object.create({}, {
                id: {
                  value: 2,
                  writable: true,
                  enumerable: true
                },
                one: {
                  value: Object.create({}, {two: {
                    value: 'three',
                    writable: true,
                    enumerable: false
                  }}),
                  writable: true,
                  enumerable: false,
                }
              })),
              cursor = tree.select(['one', 'two']);
        cursor.set('four');
        assert.equal(tree.get(['one', 'two']), 'four');
        assert.equal((Object.getOwnPropertyDescriptor(tree.get(), 'id') || {}).enumerable, true);
        assert.equal((Object.getOwnPropertyDescriptor(tree.get(), 'one') || {}).enumerable, false);
        assert.equal((Object.getOwnPropertyDescriptor(tree.get('one'), 'two') || {}).enumerable, false);
      });
      it('should be possible to shallow merge two objects.', function(done) {
        const tree = new Baobab({o: {hello: 'world'}, string: 'test'});
        const cursor = tree.select('o');
        cursor.merge({hello: 'jarl'});
        tree.on('update', function() {
          assert.deepEqual(tree.get('o'), {hello: 'jarl'});
          done();
        });
      });
      it('should be possible to deep merge two objects.', function(done) {
        const tree = new Baobab({
          data: {
            items: {
              one: 1
            }
          }
        });
        const cursor = tree.select('data');
        cursor.deepMerge({items: {two: 2}, hello: 'world'});
        tree.on('update', function(e) {
          assert.strictEqual(e.data.transaction[0].type, 'deepMerge');
          assert.deepEqual(
            cursor.get(),
            {
              items: {
                one: 1,
                two: 2
              },
              hello: 'world'
            }
          );
          done();
        });
      });
      it('should be possible to remove keys from a cursor.', function() {
        const tree = new Baobab({one: 1, two: {subone: 1, subtwo: 2}}, {asynchronous: false}),
              cursor = tree.select('two');
        assert.deepEqual(cursor.get(), {subone: 1, subtwo: 2});
        cursor.unset('subone');
        assert.deepEqual(cursor.get(), {subtwo: 2});
      });
      it('should be possible to remove data at cursor.', function() {
        const tree = new Baobab({one: 1, two: {subone: 1, subtwo: 2}}, {asynchronous: false}),
              cursor = tree.select('two');
        assert.deepEqual(cursor.get(), {subone: 1, subtwo: 2});
        cursor.unset();
        assert.strictEqual(cursor.get(), undefined);
      });
      it('should be possible to unset an array\'s item.', function() {
        // Unsetting a list index splices the item out rather than leaving a hole.
        const tree = new Baobab({list: [1, 2, 3]}),
              cursor = tree.select('list');
        cursor.unset(1);
        assert.deepEqual(cursor.get(), [1, 3]);
        assert.strictEqual(cursor.get().length, 2);
      });
      it('should do nothing to unset an inexistant key.', function() {
        const tree = new Baobab();
        tree.unset(['one', 'two']);
        assert.deepEqual(tree.get(), {});
      });
      it('should be possible to unset null/undefined values.', function() {
        const tree = new Baobab({nullValue: null, undefinedValue: null});
        assert(tree.exists('nullValue'));
        assert(tree.exists('undefinedValue'));
        tree.unset('nullValue');
        tree.unset('undefinedValue');
        assert.deepEqual(tree.get(), {});
        assert(!tree.exists('nullValue'));
        assert(!tree.exists('undefinedValue'));
      });
      it('should be possible to push/unshift/concat to an array.', function() {
        const tree = new Baobab([]);
        tree.push(2);
        tree.unshift(1);
        tree.concat([3, 4]);
        assert.deepEqual(tree.get(), [1, 2, 3, 4]);
      });
      it('should be possible to splice an array.', function() {
        const tree = new Baobab({list: [1, 2, 3]}, {asynchronous: false}),
              cursor = tree.select('list');
        assert.deepEqual(cursor.get(), [1, 2, 3]);
        cursor.splice([0, 1]);
        cursor.splice([1, 1, 4]);
        assert.deepEqual(cursor.get(), [2, 4]);
      });
      it('should be possible to pop an array.', function() {
        // Checked in both persistent and non-persistent modes.
        const ptree = new Baobab({list: [1, 2, 3]}, {asynchronous: false}),
              tree = new Baobab({list: [1, 2, 3]}, {asynchronous: false, persistent: false});
        ptree.pop('list');
        tree.pop('list');
        assert.deepEqual(
          ptree.get('list'),
          [1, 2]
        );
        assert.deepEqual(
          tree.get('list'),
          [1, 2]
        );
      });
      it('should be possible to shift an array.', function() {
        const ptree = new Baobab({list: [1, 2, 3]}, {asynchronous: false}),
              tree = new Baobab({list: [1, 2, 3]}, {asynchronous: false, persistent: false});
        ptree.shift('list');
        tree.shift('list');
        assert.deepEqual(
          ptree.get('list'),
          [2, 3]
        );
        assert.deepEqual(
          tree.get('list'),
          [2, 3]
        );
      });
      it('should be possible to set a falsy value.', function() {
        const tree = new Baobab({hello: 'world'}, {asynchronous: false});
        tree.set('hello', '');
        assert.strictEqual(tree.get('hello'), '');
        tree.set('hello', false);
        assert.strictEqual(tree.get('hello'), false);
      });
      it('should be possible to set values using a falsy path.', function() {
        // Empty-string keys and index 0 are valid path steps.
        const tree = new Baobab({list: ['hey'], dict: {}}, {asynchronous: false});
        tree.select('dict').set('', 'hello');
        tree.select('list').set(0, 'ho');
        assert.deepEqual(tree.get(), {list: ['ho'], dict: {'': 'hello'}});
      });
      it('should throw errors when updating with wrong values.', function() {
        const cursor = (new Baobab()).root;
        assert.throws(function() {
          // @ts-ignore
          cursor.merge('John');
        }, /value/);
        assert.throws(function() {
          // @ts-ignore
          cursor.splice('John');
        });
        assert.throws(function() {
          // @ts-ignore
          cursor.apply('John');
        });
      });
    });
});
  /**
   * Events: update propagation between tree, parent, child and sibling cursors.
   */
  describe('Events', function() {
    it('should be possible to listen to updates.', function(done) {
      const tree = new Baobab(state),
            colorCursor = tree.select('one', 'subtwo', 'colors');
      colorCursor.on('update', function() {
        assert.deepEqual(colorCursor.get(), ['blue', 'yellow', 'purple']);
        done();
      });
      colorCursor.push('purple');
    });
    it('when a parent updates, so does the child.', function(done) {
      const tree = new Baobab(state),
            parent = tree.select('two'),
            child = tree.select(['two', 'firstname']);
      let count = 0;
      // Both listeners must fire exactly once for a single parent write.
      parallel({
        parent(next) {
          parent.on('update', function(this: Cursor) {
            assert.deepEqual({firstname: 'Napoleon', lastname: 'Bonaparte'}, this.get());
            count++;
            next();
          });
        },
        child(next) {
          child.on('update', function() {
            count++;
            next();
          });
        }
      }, function() {
        assert.strictEqual(count, 2);
        done();
      });
      parent.set({firstname: 'Napoleon', lastname: 'Bonaparte'});
    });
    it('when a child updates, so does the parent.', function(done) {
      const tree = new Baobab(state),
            parent = tree.select('two'),
            child = tree.select(['two', 'firstname']);
      let count = 0;
      parallel({
        parent(next) {
          parent.on('update', function() {
            count++;
            next();
          });
        },
        child(next) {
          child.on('update', function() {
            count++;
            next();
          });
        }
      }, function() {
        assert.strictEqual(count, 2);
        done();
      });
      child.set('Napoleon');
    });
    it('when a leave updates, it should not update its siblings.', function(done) {
      const tree = new Baobab({
        node: {
          leaf1: 'hey',
          leaf2: 'ho'
        }
      });
      const parent = tree.select('node'),
            leaf1 = parent.select('leaf1'),
            leaf2 = parent.select('leaf2');
      let count = 0;
      const handler = () => count++;
      // Expect exactly two notifications: the written leaf and its parent,
      // but never the untouched sibling.
      parallel({
        node(next) {
          parent.on('update', handler);
          setTimeout(next, 30);
        },
        leaf1(next) {
          leaf1.on('update', handler);
          setTimeout(next, 30);
        },
        leaf2(next) {
          leaf2.on('update', handler);
          setTimeout(next, 30);
        }
      }, function() {
        assert.strictEqual(count, 2);
        done();
      });
      leaf1.set('tada');
    });
    it('should not notify siblings in an array when pushing.', function() {
      const tree = new Baobab({list: ['one']}, {asynchronous: false}),
            cursor = tree.select('list', 0);
      let count = 0;
      const listener = () => count++;
      cursor.on('update', listener);
      tree.push('list', 'two');
      tree.set('list', ['three']);
      assert.strictEqual(count, 1);
    });
    it('should be possible to listen to changes in an array.', function(done) {
      const tree = new Baobab({list: ['hello', 'world']}),
            cursor = tree.select('list', 1);
      assert.strictEqual(cursor.get(), 'world');
      cursor.on('update', function() {
        assert.strictEqual(cursor.get(), 'jacky');
        done();
      });
      cursor.set('jacky');
    });
    it('should fire update correctly even when root node is affected.', function(done) {
      const tree = new Baobab({first: 1, second: 2});
      tree.select('first').on('update', function() {
        assert.deepEqual(
          tree.get(),
          {first: 1.1, second: 2.2}
        );
        done();
      });
      tree.root.set({first: 1.1, second: 2.2});
    });
    it('update events should expose the cursor\'s data.', function(done) {
      const tree = new Baobab({one: {hello: 'world'}});
      tree.select('one').on('update', function(e) {
        assert.deepEqual(e.data.previousData, {hello: 'world'});
        assert.deepEqual(e.data.currentData, {hello: 'monde'});
        done();
      });
      tree.set(['one', 'hello'], 'monde');
    });
    it('update events should expose previous computed data.', function(done) {
      // Monkey (computed) nodes must also report previous/current data.
      const tree = new Baobab({
        list: ['hey', 'ho'],
        currentItem: 0,
        current: Baobab.monkey([
          ['list'],
          ['currentItem'],
          function(list: string[], i: number) {
            return list[i];
          }
        ])
      });
      const cursor = tree.select('current');
      cursor.on('update', function({data}) {
        assert.strictEqual(data.currentData, 'ho');
        assert.strictEqual(data.previousData, 'hey');
        done();
      });
      tree.set('currentItem', 1);
    });
    it('dynamic cursors should see their solvedPath correctly update on writes.', function(done) {
      const tree = new Baobab({colors: []}),
            cursor = tree.select('colors', {id: 0});
      assert.strictEqual(cursor.get(), undefined);
      tree.push('colors', {id: 0, name: 'yellow'});
      assert.deepEqual(cursor.get(), {id: 0, name: 'yellow'});
      cursor.on('update', done.bind(null, null));
    });
  });
/**
* Predicates
*/
describe('Predicates', function() {
const tree = new Baobab(state);
it('should be possible to tell whether cursor is root.', function() {
assert(tree.select('one').up()!.isRoot());
assert(!tree.select('one').isRoot());
});
it('should be possible to tell whether cursor is leaf.', function() {
assert(tree.select('primitive').isLeaf());
assert(!tree.select('one').isLeaf());
});
it('should be possible to tell whether cursor is branch.', function() {
assert(tree.select('one').isBranch());
assert(!tree.select('one').up()!.isBranch());
assert(!tree.select('primitive').isBranch());
});
});
  /**
   * Traversal
   *
   * Moving cursors around the tree: subcursors, up/down/left/right,
   * leftmost/rightmost, iteration and mapping over list cursors, and how
   * traversal behaves on dynamic (predicate-based) cursors.
   */
  describe('Traversal', function() {
    const tree = new Baobab(state);
    const colorCursor = tree.select(['one', 'subtwo', 'colors']),
          oneCursor = tree.select('one');
    it('should be possible to create subcursors.', function() {
      const sub = oneCursor.select(['subtwo', 'colors']);
      assert.deepEqual(sub.get(), state.one.subtwo.colors);
    });
    it('should be possible to go up.', function() {
      const parent = colorCursor.up();
      assert.deepEqual(parent!.get(), state.one.subtwo);
    });
    it('a cusor going up to root cannot go higher and returns null.', function() {
      const up = tree.select('one').up(),
            upper = up!.up();
      assert.strictEqual(upper, null);
    });
    it('should be possible to go left.', function() {
      const left = colorCursor.select(1).left();
      assert.strictEqual(left!.get(), 'blue');
      // The first sibling has no left neighbour.
      assert.strictEqual(left!.left(), null);
      // left() on a non-list item throws.
      assert.throws(function() {
        colorCursor.left();
      }, /left/);
    });
    it('should be possible to go right.', function() {
      const right = colorCursor.select(0).right();
      assert.strictEqual(right!.get(), 'yellow');
      assert.strictEqual(right!.right(), null);
      assert.throws(function() {
        colorCursor.right();
      }, /right/);
    });
    it('should be possible to descend.', function() {
      const list = tree.select('list');
      // down() moves to the first element of a list.
      assert.deepEqual(list.down().get(), [1, 2]);
      assert.strictEqual(colorCursor.down().get(), 'blue');
      assert.strictEqual(colorCursor.down().up()!.up()!.select('colors').down().get(), 'blue');
      assert.strictEqual(list.down().right()!.down().right()!.get(), 4);
      // down() on a non-list cursor throws.
      assert.throws(function() {
        oneCursor.down();
      }, /down/);
    });
    it('should be possible to get to the leftmost item of a list.', function() {
      const listItem = tree.select('longList', 2);
      assert.strictEqual(listItem.get(), 3);
      assert.strictEqual(listItem.leftmost()!.get(), 1);
    });
    it('should be possible to get to the rightmost item of a list.', function() {
      const listItem = tree.select('longList', 2);
      assert.strictEqual(listItem.get(), 3);
      assert.strictEqual(listItem.rightmost()!.get(), 4);
    });
    it('should be possible to iterate over an array.', function() {
      const result = [];
      // Cursors over arrays are iterable, yielding one cursor per item.
      for (const i of colorCursor) {
        result.push(i.get());
      }
      assert.deepEqual(result, state.one.subtwo.colors);
      // Iterating a non-list cursor throws.
      assert.throws(function() {
        for (const i of oneCursor) {
          result.push(i);
        }
      }, /non-list/);
    });
    it('should be possible to map an array.', function() {
      let count = 0;
      // map() calls back with (itemCursor, index); `this` defaults to the
      // mapped cursor itself.
      const array = colorCursor.map(function(this: Cursor, cursor, i) {
        assert(this === colorCursor);
        assert(count++ === i);
        return cursor;
      });
      assert.deepEqual(
        array.map(c => c.get()),
        state.one.subtwo.colors
      );
      // An explicit scope can be supplied as the second argument.
      const scope = {hello: 'world'};
      colorCursor.map(function(this: {}) {
        assert(this === scope);
      }, scope);
      assert.throws(function() {
        // @ts-ignore
        oneCursor.map(Function.prototype);
      }, /non-list/);
    });
    it('should be supported correctly with dynamic cursors.', function() {
      const cursor = tree.select('one', 'subtwo', 'colors', {id: 23});
      assert.deepEqual(cursor.up()!.path, ['one', 'subtwo', 'colors']);
      assert.deepEqual(cursor.select('test').path, ['one', 'subtwo', 'colors', {id: 23}, 'test']);
      assert.deepEqual(cursor.root().get(), tree.root.get());
      // Sibling/descent helpers are unsupported on dynamic cursors and throw.
      assert.throws(function() {
        cursor.left();
      }, /left/);
      assert.throws(function() {
        cursor.right();
      }, /right/);
      assert.throws(function() {
        cursor.down();
      }, /down/);
      assert.throws(function() {
        cursor.leftmost();
      }, /leftmost/);
      assert.throws(function() {
        cursor.rightmost();
      }, /rightmost/);
      assert.throws(function() {
        // @ts-ignore
        cursor.map();
      }, /map/);
    });
  });
  /**
   * History
   *
   * Cursor history: recording updates, reading/clearing the history,
   * and undoing past states (single and multi-step).
   */
  describe('History', function() {
    it('should be possible to record updates.', function() {
      const tree = new Baobab({item: 1}, {asynchronous: false}),
            cursor = tree.select('item');
      // Recording is off by default.
      assert(!cursor.state.recording);
      assert(!cursor.hasHistory());
      assert.deepEqual(cursor.getHistory(), []);
      // Keep at most 5 recorded states.
      cursor.startRecording(5);
      assert(cursor.state.recording);
      [1, 2, 3, 4, 5, 6].forEach(function() {
        cursor.apply(e => e + 1);
      });
      assert(cursor.hasHistory());
      assert.strictEqual(cursor.get(), 7);
      // Only the 5 most recent states are retained, newest first.
      assert.deepEqual(cursor.getHistory(), [2, 3, 4, 5, 6].reverse());
      cursor.stopRecording();
      cursor.clearHistory();
      assert(!cursor.state.recording);
      assert(!cursor.hasHistory());
      assert.deepEqual(cursor.getHistory(), []);
    });
    it('should throw an error if trying to undo a recordless cursor.', function() {
      const tree = new Baobab({item: 1}, {asynchronous: false}),
            cursor = tree.select('item');
      assert.throws(function() {
        cursor.undo();
      }, /recording/);
    });
    it('should be possible to go back in time.', function() {
      const tree = new Baobab({item: 1}, {asynchronous: false}),
            cursor = tree.select('item');
      cursor.startRecording(5);
      [1, 2, 3, 4, 5, 6].forEach(function() {
        cursor.apply(e => e + 1);
      });
      assert.strictEqual(cursor.get(), 7);
      // Each undo() pops the most recent recorded state.
      cursor.undo();
      assert.strictEqual(cursor.get(), 6);
      assert.deepEqual(cursor.getHistory(), [2, 3, 4, 5].reverse());
      cursor.undo().undo();
      assert.strictEqual(cursor.get(), 4);
      assert.deepEqual(cursor.getHistory(), [2, 3].reverse());
      cursor.set(5);
      cursor.set(6);
      // undo(n) steps back n recorded states at once.
      cursor.undo(3);
      assert.strictEqual(cursor.get(), 3);
      assert.deepEqual(cursor.getHistory(), [2]);
      // Undoing past the recorded history is an error.
      assert.throws(function() {
        cursor.undo(5);
      }, /relevant/);
    });
  });
  // NOTE(review): this test is registered OUTSIDE the History describe() above
  // (on the enclosing suite). It still runs, but it probably belongs inside
  // 'History' — confirm before moving it.
  it('should be possible to restart a history after having stopped it before.', function() {
    const tree = new Baobab({item: 1}, {asynchronous: false}),
          cursor = tree.select('item');
    cursor.startRecording();
    cursor.stopRecording();
    // Restarting must re-arm recording and capture subsequent changes.
    cursor.startRecording();
    cursor.set(2);
    assert(cursor.state.recording);
    assert.deepEqual(cursor.getHistory(), [1]);
  });
/**
* Advanced issues
*/
describe('Advanced', function() {
it('should be possible to execute several orders within a single stack.', function(done) {
const tree = new Baobab({
one: 'coco',
two: 'koko'
});
tree.set('one', 'cece');
tree.set('two', 'keke');
setTimeout(function() {
assert.deepEqual(tree.get(), {one: 'cece', two: 'keke'});
done();
}, 0);
});
it('should be possible to merge push-like specifications.', function(done) {
const tree = new Baobab({list: [1]}),
cursor = tree.select('list');
cursor.push(2);
cursor.push(3);
cursor.unshift(-1);
cursor.concat([4, 5]);
setTimeout(function() {
assert.deepEqual(cursor.get(), [-1, 1, 2, 3, 4, 5]);
done();
}, 0);
});
it('an upper set should correctly resolve.', function(done) {
const tree = new Baobab({hello: {color: 'blue'}});
tree.select('hello', 'color').set('yellow');
tree.set('hello', 'purple');
tree.on('update', function() {
assert.deepEqual(tree.get(), {hello: 'purple'});
done();
});
});
it('a $set/$apply conflict should correctly resolve.', function(done) {
const tree = new Baobab({number: 1});
tree.set('number', 2);
tree.update(['number'], {type: 'apply', value: (x: number) => x + 2});
tree.on('update', function() {
assert.strictEqual(tree.get('number'), 4);
done();
});
});
it('should be possible to set a nested key on a primitive path.', function() {
const tree = new Baobab({
hello: 42
}, {asynchronous: false});
tree.set(['hello', 'cowabunga'], 43);
assert.deepEqual(tree.get(), {
hello: {
cowabunga: 43
}
});
});
});
}); | the_stack |
import app, { Component } from '../src/apprun';
// Tests for class-based (stateful) AppRun components: constructor/prop
// passing, children rendering, lifecycle callbacks (mounted/rendered),
// state retention across parent refreshes, event (un)wiring, and props
// applied to the child component's wrapper element.
describe('Stateful Component', () => {
  it('should pass props to the constructor', () => {
    class Child extends Component {
      view = (state) => {
        return <div>{state}</div>
      }
      constructor({ n }) {
        super();
        expect(n).toBe('7');
      }
    }
    class Main extends Component {
      view = (state) => {
        return <div>
          <Child n='7' />
        </div>
      }
    }
    new Main().start();
  });
  it('should set props as state', () => {
    class Child extends Component {
      view = (state) => {
        expect(state.n).toBe('8');
        return <div>{state.n}</div>
      }
    }
    class Main extends Component {
      view = (state) => {
        return <div>
          <Child n='8'/>
        </div>
      }
    }
    new Main().start();
  });
  it('should render children', () => {
    class Child extends Component {
      view = (state) => <div>
        {state.children}
      </div>
    }
    class Main extends Component {
      view = (state) => {
        return <div>
          <Child>
            <p>child</p>
          </Child>
        </div>
      }
    }
    const element = document.createElement('div');
    app.render(element, <Main />);
    expect(element.textContent).toBe('child');
  });
  it('should call mounted function when created', (done) => {
    class Child extends Component {
      view = (state) => {
        return <div>{state.n}</div>
      }
      mounted = (props) => {
        expect(props.n).toBe(0);
        done()
      }
    }
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>
          <Child n={0}/>
        </div>
      }
    }
    new Main().start();
  });
  it('should call rendered function when created', (done) => {
    class Child extends Component {
      view = (state) => {
        return <div>{state.n}</div>
      }
      rendered = (state) => {
        expect(state.n).toBe(0);
        done()
      }
    }
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>
          <Child n={0} />
        </div>
      }
    }
    const element = document.createElement('div');
    app.render(element, <Main />);
  });
  it('should call mounted function when refreshed', (done) => {
    class Child extends Component {
      view = (state) => {
        return <div>{state.n}</div>
      }
      mounted = (props) => {
        if (props.n === 1) done();
      }
    }
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>
          <Child n={state}/>
        </div>
      }
      update = {
        '+1': state => state + 1
      }
    }
    const component = new Main().start();
    component.run('+1');
  });
  it('should allow mounted function to call this.run', () => {
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>{state}</div>
      }
      update = {
        '+1': state => state + 1
      }
      mounted = () => {
        this.run('+1');
      }
    }
    const element = document.createElement("div");
    app.render(element, <Main />);
    expect(element.textContent).toBe("1");
  });
  it('should allow async event inside the mounted function', (done) => {
    class Child3 extends Component {
      view = (state) => {
        return <div>{state.n}</div>
      }
      mounted = (n) => {
        if (n !== this.state) this.run('init-async', n);
      }
      update = {
        'init-async': async (state, value) =>
          new Promise(resolve =>
            setTimeout(() =>
              resolve(value))
          )
      }
    }
    class Main extends Component {
      view = (state) => {
        return <div>
          <Child3 n='a' />
        </div>
      }
    }
    const element = document.createElement('div');
    document.body.appendChild(element);
    app.render(element, <Main />);
    setTimeout(() => {
      expect(element.textContent).toEqual('a');
      done();
    },10)
  });
  it('should allow mounted function return a new state', (done) => {
    class Main extends Component {
      state = 10
      view = (state) => {
        return <div>{state}</div>
      }
      update = {
        '+1': state => state + 1
      }
      mounted = () => 0
    }
    const element = document.createElement("div");
    app.render(element, <Main />);
    setTimeout(() => {
      expect(element.textContent).toBe("0");
      expect(element.firstChild['_component'].state).toBe(0);
      done();
    }, 20)
  });
  it('should allow mounted function return void', () => {
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>{state}</div>
      }
      update = {
        '+1': state => state + 1
      }
      mounted = () => {
      }
    }
    const element = document.createElement("div");
    app.render(element, <Main />);
    expect(element.textContent).toBe("0");
  });
  it('should allow Promise returned from the mounted function', (done) => {
    class Child4 extends Component {
      view = (state) => {
        return <div>{state}</div>
      }
      mounted = ({ n }) => {
        return new Promise(resolve =>
          setTimeout(() => resolve(n +'!'))
        )
      }
    }
    class Main extends Component {
      view = (state) => {
        return <div>
          <Child4 n='a' />
        </div>
      }
    }
    const element = document.createElement('div');
    document.body.appendChild(element);
    app.render(element, <Main />);
    setTimeout(() => {
      expect(element.textContent).toEqual('a!');
      done();
    }, 100)
  });
  it('should allow async mounted function and retain state after parent refresh', (done) => {
    class Child extends Component {
      state = 0
      view = (state) => {
        return <div>{state}</div>
      }
      update = {
        // NOTE(review): '@@_pranet_event' looks like a typo for 'parent', but
        // the same string is used by both publisher and subscriber below, so
        // behavior is unaffected.
        '@child_event': () => { app.run('@@_pranet_event') }
      }
      mounted = async () => {
        if (this.state == 0) return new Promise(resolve =>
          setTimeout(() => resolve(100)))
      }
    }
    class Main extends Component {
      view = () => <Child />;
      update = {
        '@@_pranet_event': state => state
      }
    }
    const element = document.createElement("div");
    app.render(element, <Main />);
    app.run('@child_event');
    setTimeout(() => {
      expect(element.textContent).toBe("100");
      done();
    }, 20);
  });
  it('should off all events after unmount', () => {
    class Ch extends Component {
      update = {
        '#1': state=> state,
        '#2': state => state,
        '1': state=> state,
        '2': state=> state,
      }
    }
    // '#'-prefixed events register on the global app; plain names register on
    // the component's private app instance. Both must be removed on unmount.
    const component = new Ch().mount();
    expect(app['_events']['#1'].length).toBe(1);
    expect(app['_events']['#2'].length).toBe(1);
    expect(component['_app']['_events']['1'].length).toBe(1);
    expect(component['_app']['_events']['2'].length).toBe(1);
    component.unmount();
    expect(app['_events']['#1'].length).toBe(0);
    expect(app['_events']['#2'].length).toBe(0);
    expect(component['_app']['_events']['1'].length).toBe(0);
    expect(component['_app']['_events']['2'].length).toBe(0);
  });
  it('should share same instance when refresh', (done) => {
    class Child extends Component {
      state = { n: 0 }
      view = (state) => {
        return <div>{state.n}</div>
      }
      mounted = ({ n }) => {
        // on second refresh, the state should retain
        if (n === 2 && this.state.n === 1) done();
        this.state.n = n;
      }
    }
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>
          <Child n={state}/>
        </div>
      }
      update = {
        '+1': state => state + 1
      }
    }
    const element = document.createElement('div');
    const component = new Main().start(element);
    component.run('+1'); // trigger a refresh
    component.run('+1'); // trigger a refresh
  });
  it('should not share the same instance', () => {
    class Child extends Component {
      state = { n: 0 }
      view = (state) => {
        return <div>{state.n}</div>
      }
      constructor({n}) {
        super();
        this.state.n = n;
      }
    }
    class Main extends Component {
      state = 0
      view = (state) => {
        return <div>
          <Child n="1" />
          <div>
            <Child n="2"/>
          </div>
          <Child n="3"/>
        </div>
      }
    }
    const element = document.createElement('div');
    app.render(element, <Main />);
    expect(element.textContent).toBe("123");
  });
  it("should set props to child component's div wrapper", () => {
    class Child extends Component {
    }
    class Main extends Component {
      view = _ => <>
        <Child class="c1" id="c1" />
        <Child className="c2" />
        <Child style={{ 'color': 'red' }} />
        <Child data-a='a' />
      </>;
    }
    const element = document.createElement('div');
    app.render(element, <Main />);
    const main = element.children[0];
    expect(main.children[0].id).toBe("c1");
    expect(main.children[0].className).toBe("c1");
    expect(main.children[1].className).toBe("c2");
    expect((main.children[2] as HTMLDivElement).style.color).toBe("red");
    expect((main.children[3] as HTMLDivElement).dataset.a).toBe("a");
  });
  // deprecated props should only be in mounted
  // it("should get props in view function", () => {
  //   class Child extends Component {
  //     view = (_, props) => {
  //       expect(props['class']).toBe('c1');
  //     }
  //   }
  //   class Main extends Component {
  //     view = _ => <Child class="c1" id="c1" />
  //   }
  //   const element = document.createElement('div');
  //   app.render(element, <Main />);
  // });
  it("should support as prop", () => {
    class Child extends Component {
      view = state => state;
    }
    class Main extends Component {
      view = _ => <Child as="h3" />
    }
    const element = document.createElement('div');
    app.render(element, <Main as="h2"/>);
    expect(element.firstElementChild.tagName).toBe('H2');
    expect(element.firstElementChild.firstElementChild.tagName).toBe('H3');
  });
  it("should refresh", () => {
    class Child extends Component {
      state = 0;
      view = state => state;
      update = {
        '@refresh': (_, state) => state
      }
    }
    class Main extends Component {
      view = _ => <Child />
    }
    const element = document.createElement('div');
    app.render(element, <Main />);
    app.run('@refresh', 'aaa')
    expect(element.firstElementChild.firstElementChild.innerHTML).toBe('aaa');
  });
});
import * as fs from "fs";
import * as ltx from "ltx";
import * as tl from 'azure-pipelines-task-lib/task';
import * as Q from 'q';
import * as auth from 'packaging-common/nuget/Authentication';
import { NuGetConfigHelper2 } from 'packaging-common/nuget/NuGetConfigHelper2';
import * as ngRunner from 'packaging-common/nuget/NuGetToolRunner2';
import * as path from 'path';
import { IExecOptions } from 'azure-pipelines-task-lib/toolrunner';
import * as request from 'request';
/**
 * Task entry point: installs a GitHub package by generating a throwaway
 * .csproj and running `dotnet add package` + `dotnet restore` against a
 * temporary NuGet.config that points at the user's GitHub Packages feed.
 *
 * Inputs read: packageName ("owner/name"), version, externalEndpoints,
 * packagesDirectory. Sets the task result to Succeeded/Failed.
 */
export async function run(): Promise<void> {
    const buildIdentityDisplayName: string = null;
    const buildIdentityAccount: string = null;
    try {
        // packageName is expected in "owner/name" form; only the name part
        // is passed on to dotnet.
        let packageName: string = tl.getInput("packageName");
        let packageVersion: string = tl.getInput("version") || "";
        if (!packageName || packageName.indexOf("/") < 0)
        {
            throw Error(tl.loc('Error_InvalidPackageName'));
        }
        else {
            packageName = packageName.split("/")[1];
        }
        let endpointNames = tl.getDelimitedInput("externalEndpoints", ',');
        // NOTE(review): endpointNames[0] is used unguarded; if no endpoint is
        // configured this passes undefined — confirm the task definition makes
        // the input mandatory.
        const username: string = await GetGitHubUser(endpointNames[0]); // we will always have a single connection
        const noCache = true;
        const verbosity = "minimal";
        let packagesDirectory = tl.getPathInput('packagesDirectory');
        if (!tl.filePathSupplied('packagesDirectory')) {
            packagesDirectory = null;
        }
        const externalAuthArr: auth.ExternalAuthInfo[] = GetExternalAuthInfoArray('externalEndpoints', username);
        const authInfo = new auth.NuGetExtendedAuthInfo(null, externalAuthArr);
        // Setting up sources, either from provided config file or from feed selection
        tl.debug('Setting up sources');
        // If there was no nuGetConfigPath, NuGetConfigHelper will create one
        const nuGetConfigHelper = new NuGetConfigHelper2(
            null,
            null,
            authInfo,
            { credProviderFolder: null, extensionsDisabled: true },
            getTempNuGetConfigPath() /* tempConfigPath */,
            false /* useNugetToModifyConfigFile */);
        let credCleanup = () => { return; };
        const sources: Array<auth.IPackageSource> = new Array<auth.IPackageSource>();
        let feedUri: string = "https://nuget.pkg.github.com/" + username + "/index.json";
        sources.push(<auth.IPackageSource>
            {
                feedName: "github",
                feedUri: feedUri,
                isInternal: false
            });
        // Creating NuGet.config for the user
        if (sources.length > 0) {
            tl.debug(`Adding the following sources to the config file: ${sources.map(x => x.feedName).join(';')}`);
            nuGetConfigHelper.addSourcesToTempNuGetConfig(sources);
            // Only register cleanup once the temp config actually exists.
            credCleanup = () => {
                tl.rmRF(nuGetConfigHelper.tempNugetConfigPath);
            };
        } else {
            tl.debug('No sources were added to the temp NuGet.config file');
        }
        // Setting creds in the temp NuGet.config if needed
        nuGetConfigHelper.setAuthForSourcesInTempNuGetConfig();
        const configFile = nuGetConfigHelper.tempNugetConfigPath;
        nuGetConfigHelper.backupExistingRootNuGetFiles();
        const dotnetPath = tl.which('dotnet', true);
        let projectFiles = CreateProjectFiles(configFile);
        try {
            // Add + restore per generated project; the finally block always
            // removes the temp config and restores the backed-up root files.
            for (const projectFile of projectFiles) {
                await dotnetAddAsync(dotnetPath, projectFile, packageName, packageVersion, configFile);
                await dotNetRestoreAsync(dotnetPath, projectFile, packagesDirectory, configFile, noCache, verbosity);
            }
        } finally {
            credCleanup();
            nuGetConfigHelper.restoreBackupRootNuGetFiles();
        }
        tl.setResult(tl.TaskResult.Succeeded, tl.loc('PackagesInstalledSuccessfully'));
    } catch (err) {
        tl.error(err);
        if (buildIdentityDisplayName || buildIdentityAccount) {
            tl.warning(tl.loc('BuildIdentityPermissionsHint', buildIdentityDisplayName, buildIdentityAccount));
        }
        tl.setResult(tl.TaskResult.Failed, tl.loc('PackageFailedToInstall'));
    }
}
/**
 * Computes the path of the temporary NuGet.config used by this task.
 * Prefers the build directory, falling back to the agent temp directory.
 */
function getTempNuGetConfigPath(): string {
    const baseDir = tl.getVariable("Agent.BuildDirectory") || tl.getVariable("Agent.TempDirectory");
    return path.join(baseDir, "Nuget", "dotnet", "nuget.config");
}
/**
 * Creates a minimal temporary .csproj next to the temp NuGet.config so that
 * `dotnet add package` / `dotnet restore` have a project to operate on.
 *
 * @param configPath Path of the temp NuGet.config; the project file is
 *                   created in the same directory.
 * @returns Paths of the project files that were created (currently one).
 */
function CreateProjectFiles(configPath: string): string[] {
    const projectFilePath: string = getProjectFilePath(configPath);
    tl.writeFile(projectFilePath, "<Project Sdk=\"Microsoft.NET.Sdk\" />");
    updateXmlFile(projectFilePath, (xml: any): any => {
        if (xml) {
            if (xml.getName().toLowerCase() !== "project") {
                throw Error("Expected project element");
            }
            // The ltx builder calls (.c creates a child, .t appends text)
            // mutate the tree in place; chain them directly instead of binding
            // the intermediate results to unused locals.
            const xmlPropertyGroup = getOrCreateLastElement(xml, "PropertyGroup");
            xmlPropertyGroup.c("OutputType").t("Exe");
            xmlPropertyGroup.c("TargetFramework").t("netcoreapp2.1");
        }
        return xml;
    });
    return [projectFilePath];
}
/**
 * Reads an XML file, runs `updateFn` over the parsed tree and writes the
 * (possibly replaced) tree back to disk.
 *
 * @param xmlPath  Path of the XML file to rewrite in place.
 * @param updateFn Receives the parsed root element; returns the element to
 *                 serialize back (may be the same, mutated instance).
 */
function updateXmlFile(xmlPath: string, updateFn: (xml: any) => any): void {
    // Read as text directly instead of Buffer#toString().
    let xmlString = fs.readFileSync(xmlPath, "utf8");
    // Strip the BOM if present; the xml parser doesn't like it.
    // (slice replaces the deprecated String.prototype.substr.)
    if (xmlString.charCodeAt(0) === 0xFEFF) {
        xmlString = xmlString.slice(1);
    }
    let xml = ltx.parse(xmlString);
    xml = updateFn(xml);
    fs.writeFileSync(xmlPath, xml.root().toString());
}
/**
 * Computes the path of the temporary .csproj, placed alongside the given
 * NuGet.config and made unique per build via the build id.
 */
function getProjectFilePath(configPath: string): string {
    const projectFileName = "tempCsproj_" + tl.getVariable("build.buildId") + ".csproj";
    return path.join(path.dirname(configPath), projectFileName);
}
/**
 * Gets the last element in xml that matches elementName. If no existing
 * element is found, one will be created on the root of xml.
 * Returns undefined when xml itself is falsy.
 * @param xml Xml Element to search
 * @param elementName Element name to return or create
 */
function getOrCreateLastElement(xml: any, elementName: string): any {
    if (!xml) {
        return undefined;
    }
    let matches = xml.getChildren(elementName);
    if (!matches || matches.length === 0) {
        // Nothing matched: create one child on the root and use it.
        matches = [xml.c(elementName)];
    }
    return matches[matches.length - 1];
}
/**
 * Builds the external auth info entries for the GitHub Packages feed from
 * the configured service connections.
 *
 * @param inputKey Task input holding the comma-separated endpoint names.
 * @param username GitHub user owning the package feed (feed URL segment).
 * @returns One TokenExternalAuthInfo per endpoint with a usable token scheme.
 */
function GetExternalAuthInfoArray(inputKey: string, username: string): auth.ExternalAuthInfo[]
{
    const externalAuthArray: auth.ExternalAuthInfo[] = [];
    const endpointNames = tl.getDelimitedInput(inputKey, ',');
    if (!endpointNames || endpointNames.length === 0)
    {
        return externalAuthArray;
    }
    const feedUri = "https://nuget.pkg.github.com/" + username + "/index.json";
    endpointNames.forEach((endpointName: string) => {
        const externalAuth = tl.getEndpointAuthorization(endpointName, true);
        const scheme = tl.getEndpointAuthorizationScheme(endpointName, true).toLowerCase();
        // "token" and "personalaccesstoken" differ only in the casing of the
        // parameter that carries the token; handle them uniformly instead of
        // duplicating the push logic per case.
        let token: string;
        switch(scheme) {
            case "token":
                token = externalAuth.parameters["AccessToken"];
                break;
            case "personalaccesstoken":
                token = externalAuth.parameters["accessToken"];
                break;
            // "usernamepassword", "none" and unknown schemes contribute nothing.
            default:
                return;
        }
        tl.debug("adding token auth entry for feed GitHub");
        externalAuthArray.push(new auth.TokenExternalAuthInfo(<auth.IPackageSource>
            {
                feedName: "github",
                feedUri: feedUri
            },
            token));
    });
    return externalAuthArray;
}
/**
 * Resolves the GitHub login of the user the endpoint token belongs to, by
 * calling the GitHub /user API.
 *
 * @param endpointId Service connection id to read the token from.
 * @returns The GitHub login, or '' when the endpoint scheme carries no token.
 */
function GetGitHubUser(endpointId: string): Promise<string> {
    const externalAuth = tl.getEndpointAuthorization(endpointId, true);
    const scheme = tl.getEndpointAuthorizationScheme(endpointId, true).toLowerCase();
    if (!(scheme == "token" || scheme == "personalaccesstoken")) {
        return Promise.resolve("");
    }
    // The two supported schemes store the token under differently-cased keys.
    let token = "";
    if (scheme == "token") {
        token = externalAuth.parameters["AccessToken"];
    } else if (scheme == "personalaccesstoken") {
        token = externalAuth.parameters["accessToken"];
    }
    var url = "https://api.github.com/user";
    return new Promise((resolve, reject) => {
        request.get({
            url : url,
            headers : {
                "Authorization": "Token " + token,
                "User-Agent": "azure-pipelines"
            }
        }, function(error, response, body) {
            // Bug fix: the original rejected but then fell through to
            // JSON.parse(undefined), throwing synchronously in the callback.
            if (error) {
                reject(error);
                return;
            }
            // Guard against a non-JSON body (e.g. HTML error page).
            try {
                const responseJson = JSON.parse(body);
                resolve(responseJson["login"]);
            } catch (e) {
                reject(e);
            }
        });
    });
}
/**
 * Runs `dotnet add <project> package <name> [-v <version>] -n` to add a
 * package reference without performing an implicit restore.
 *
 * @returns Promise resolving to the dotnet exit code.
 */
function dotnetAddAsync(dotnetPath: string, projectFile: string, packageName: string, version: string, configFile: string): Q.Promise<number> {
    const addRunner = tl.tool(dotnetPath);
    addRunner.arg('add');
    if (projectFile) {
        addRunner.arg(projectFile);
    }
    if (packageName) {
        addRunner.arg('package');
        addRunner.arg(packageName);
    }
    if (version) {
        addRunner.arg('-v');
        addRunner.arg(version);
    }
    // -n: skip the implicit restore; we restore explicitly afterwards.
    addRunner.arg('-n');
    const env = ngRunner.setNuGetProxyEnvironment(process.env, configFile, null);
    return addRunner.exec({ cwd: path.dirname(projectFile), env: env } as IExecOptions);
}
function dotNetRestoreAsync(dotnetPath: string, projectFile: string, packagesDirectory: string, configFile: string, noCache: boolean, verbosity: string): Q.Promise<number> {
const dotnet = tl.tool(dotnetPath);
dotnet.arg('restore');
if (projectFile) {
dotnet.arg(projectFile);
}
if (packagesDirectory) {
dotnet.arg('--packages');
dotnet.arg(packagesDirectory);
}
dotnet.arg('--configfile');
dotnet.arg(configFile);
if (noCache) {
dotnet.arg('--no-cache');
}
if (verbosity && verbosity !== '-') {
dotnet.arg('--verbosity');
dotnet.arg(verbosity);
}
const envWithProxy = ngRunner.setNuGetProxyEnvironment(process.env, configFile, null);
return dotnet.exec({ cwd: path.dirname(projectFile), env: envWithProxy } as IExecOptions);
} | the_stack |
import * as pubSub from './shared/pubSub';
import { logDelete, logGet, logSet } from './shared/debug';
import state from './shared/state';
import * as utils from './shared/utils';
import * as paths from './shared/paths';
import { ArrayMembers, MapOrSetMembers, ORIGINAL } from './shared/constants';
import { PropPath, Target } from './shared/types';
/**
 * Add a new listener to be notified when a particular value in the store
 * changes. Subscribes the component currently being rendered; no-op when
 * called outside the render cycle.
 */
const addListener = (propPath: PropPath) => {
  const component = state.currentComponent;
  if (!component) return;
  // A string key makes matching much simpler than comparing arrays.
  const key = paths.makeInternalString(propPath);
  const subscribers = state.listeners.get(key) || new Set();
  subscribers.add(component);
  state.listeners.set(key, subscribers);
};
/**
 * These are the proxy handlers. Notes:
 * * We have different handlers (different traps) for object/array and
 *   map/set.
 * * When the proxy is muted, use Reflect[trap] and bypass any logic. The
 *   exception is Map/Set methods, where we must bind `this` first
 * * `ORIGINAL` lets us unwrap a proxied object
 * * We redirect to the 'next version' of a target if it has been changed
 *
 * @param targetObject The raw object/array/Map/Set that will be proxied.
 * @returns A ProxyHandler with the traps appropriate for the target's kind.
 */
export const getHandlerForObject = <T extends Target>(
  targetObject: T
): ProxyHandler<T> => {
  if (utils.isMap(targetObject) || utils.isSet(targetObject)) {
    // Map() and Set() get a special handler, because reads and writes all
    // happen in the get() trap (different to the get() method of the map/set!)
    return {
      get(target, prop) {
        if (prop === ORIGINAL) return target;
        let result = Reflect.get(target, prop);
        // The innards of Map and Set require this binding
        if (utils.isFunction(result)) result = result.bind(target);
        // Bail early for some things. Unlike objects/arrays, we will
        // continue on even if !state.currentComponent
        if (state.proxyIsMuted || utils.isInternal(prop)) {
          return result;
        }
        if (!state.currentComponent && state.redirectToNext) {
          const nextVersion = state.nextVersionMap.get(target);
          if (nextVersion) return Reflect.get(nextVersion, prop);
        }
        // Adding to a Map
        if (prop === MapOrSetMembers.Set) {
          const handler: ProxyHandler<() => any> = {
            apply(func, applyTarget, [key, value]) {
              if (applyTarget.get(key) === value) return true; // No change, no need to carry on
              return pubSub.dispatchUpdateInNextStore({
                target: applyTarget,
                prop: key,
                value,
                updater: (finalTarget, newProxiedValue) => {
                  logSet(target, prop, newProxiedValue);
                  // We call the map.set() now, but on the item in the
                  // store, and with the new args
                  return Reflect.apply(finalTarget[prop], finalTarget, [
                    key,
                    newProxiedValue,
                  ]);
                },
              });
            },
          };
          return new Proxy(result, handler);
        }
        // Adding to a Set
        if (prop === MapOrSetMembers.Add) {
          const handler: ProxyHandler<() => any> = {
            apply(func, applyTarget, [value]) {
              if (applyTarget.has(value)) return true; // Would be a no op
              return pubSub.dispatchUpdateInNextStore({
                target: applyTarget,
                notifyTarget: true,
                value,
                updater: (finalTarget, newProxiedValue) => {
                  logSet(target, prop, newProxiedValue);
                  return Reflect.apply(finalTarget[prop], finalTarget, [
                    newProxiedValue,
                  ]);
                },
              });
            },
          };
          return new Proxy(result, handler);
        }
        // On either a Set or Map
        if (prop === MapOrSetMembers.Clear || prop === MapOrSetMembers.Delete) {
          const handler: ProxyHandler<() => any> = {
            apply(func, applyTarget, [key]) {
              // Empty collections (or deleting an absent key) can't change.
              if (
                !applyTarget.size ||
                (prop === MapOrSetMembers.Delete && !applyTarget.has(key))
              ) {
                return false; // false indicates no change
              }
              return pubSub.dispatchUpdateInNextStore({
                target: applyTarget,
                notifyTarget: true,
                updater: (finalTarget) => {
                  logSet(target, prop);
                  return Reflect.apply(finalTarget[prop], finalTarget, [key]);
                },
              });
            },
          };
          return new Proxy(result, handler);
        }
        // Now that we've handled any modifying methods, we can
        // just return the result if we're not in the render cycle.
        if (!state.currentComponent) return result;
        // If we're reading a particular value, we'll want a listener for that
        // We don't listen on `.has` because any change that would result in
        // `.has` returning a different value would update the target
        if (utils.isMap(target) && prop === MapOrSetMembers.Get) {
          const handler: ProxyHandler<() => any> = {
            apply(func, applyTarget, args) {
              addListener(paths.extend(target, args[0]));
              return Reflect.apply(func, applyTarget, args);
            },
          };
          return new Proxy(result, handler);
        }
        // For all other read operations, just return
        return result;
      },
    };
  }
  // Handler for plain objects and arrays.
  return {
    get(target, prop) {
      // This allows getting the un-proxied version of a proxied object
      if (prop === ORIGINAL) return target;
      const result = Reflect.get(target, prop);
      if (state.proxyIsMuted || utils.isInternal(prop)) return result;
      // Mutating array methods make a lot of noise, so we wrap them in a proxy
      // Only one update will be fired - for the actual array.
      if (utils.isArrayMutation(target, prop)) {
        const handler: ProxyHandler<() => {}> = {
          apply(func, applyTarget, args) {
            return pubSub.dispatchUpdateInNextStore({
              target: applyTarget,
              notifyTarget: true,
              value: args,
              updater: (finalTarget, proxiedArgs) => {
                logSet(target, prop, proxiedArgs);
                const updateResult = Reflect.apply(
                  // @ts-ignore - Yes, symbol CAN be used as an index type
                  finalTarget[prop],
                  finalTarget,
                  proxiedArgs
                );
                const rootPath = paths.get(target);
                // At this point, the array is updated. But the paths of the
                // items could be wrong, so we refresh them.
                utils.updateDeep(finalTarget, (item, path) => {
                  if (utils.isTarget(item)) {
                    paths.addProp(item, [...rootPath, ...path]);
                  }
                });
                return updateResult;
              },
            });
          },
        };
        return new Proxy(result, handler);
      }
      // For all other methods (.join, .toString(), etc) return the function
      // @ts-ignore - wrong, symbol can be used an an index type
      if (utils.isFunction(target[prop])) return result;
      if (state.currentComponent) {
        // We record a get if a component is rendering, with the exception
        // of reading array length. This would be redundant, since changes to
        // length trigger a change on the array itself
        if (!(utils.isArray(target) && prop === ArrayMembers.Length)) {
          logGet(target, prop, result);
          addListener(paths.extend(target, prop));
        }
      } else if (state.redirectToNext) {
        // When we're outside the render cycle, we route
        // requests to the 'next version'
        // Note, this will result in another get(), but on the equivalent
        // target from the next store. muteProxy will be set so this line
        // isn't triggered in an infinite loop
        const nextVersion = state.nextVersionMap.get(target);
        if (nextVersion) return Reflect.get(nextVersion, prop);
      }
      return result;
    },
    has(target, prop) {
      const result = Reflect.has(target, prop);
      if (state.proxyIsMuted || utils.isInternal(prop)) return result;
      if (state.currentComponent) {
        // Arrays use `has` too, but we capture a listener elsewhere for that.
        // Here we only want to capture access to objects
        if (!utils.isArray(target)) {
          logGet(target, prop);
          addListener(paths.extend(target, prop));
        }
      } else {
        const nextVersion = state.nextVersionMap.get(target);
        if (nextVersion) return Reflect.has(nextVersion, prop);
      }
      return result;
    },
    ownKeys(target) {
      const result = Reflect.ownKeys(target);
      if (state.proxyIsMuted) return result;
      if (state.currentComponent) {
        // Enumerating keys during render subscribes to the object itself.
        logGet(target);
        addListener(paths.get(target));
      } else {
        const nextVersion = state.nextVersionMap.get(target);
        if (nextVersion) return Reflect.ownKeys(nextVersion);
      }
      return result;
    },
    set(target, prop, value) {
      if (state.proxyIsMuted) return Reflect.set(target, prop, value);
      // Writing to the store during render is almost always a mistake;
      // warn loudly in development builds.
      if (process.env.NODE_ENV !== 'production') {
        if (state.currentComponent) {
          console.error(
            [
              `You are attempting to modify the store during a render cycle. `,
              `(You're setting "${prop.toString()}" to "${value}" somewhere)\n`,
              `This could result in subtle bugs. `,
              `If you're changing the store in componentDidMount, wrap your `,
              `code in a setTimeout() to allow the render cycle to complete `,
              `before changing the store.`,
            ].join('')
          );
        }
      }
      // If there's no change, we return
      // @ts-ignore - target[prop] is fine
      if (target[prop] === value) return true;
      return pubSub.dispatchUpdateInNextStore({
        target,
        prop,
        value,
        updater: (finalTarget, newValueProxy) => {
          logSet(target, prop, newValueProxy);
          return Reflect.set(finalTarget, prop, newValueProxy);
        },
      });
    },
    deleteProperty(target, prop) {
      if (state.proxyIsMuted) return Reflect.deleteProperty(target, prop);
      return pubSub.dispatchUpdateInNextStore({
        target,
        prop,
        notifyTarget: true,
        updater: (finalTarget) => {
          logDelete(target, prop);
          return Reflect.deleteProperty(finalTarget, prop);
        },
      });
    },
  };
};
/**
* Wrap an item in a proxy
*/
export const createShallow = <T extends any>(target: T): T => {
if (process.env.NODE_ENV !== 'production') {
if (!target) throw Error('There is no target');
}
return new Proxy(target as Target, getHandlerForObject(target)) as T;
}; | the_stack |
import test from 'ava';
import {JsonSerialize} from '../src/decorators/JsonSerialize';
import {JsonDeserialize} from '../src/decorators/JsonDeserialize';
import {JsonClassType} from '../src/decorators/JsonClassType';
import {ObjectMapper} from '../src/databind/ObjectMapper';
import {JsonProperty} from '../src/decorators/JsonProperty';
// Class-level (de)serialization: the class serializer injects an extra
// "otherInfo" field into the JSON; the class deserializer strips it on
// parse, so a round trip restores the original object shape.
test('@JsonSerialize and @JsonDeserialize at class level', t => {
  // Serializer receives the whole instance; the spread copies all own fields.
  // eslint-disable-next-line no-shadow
  @JsonSerialize({using: (user: User, context) => ({
    otherInfo: 'other info',
    ...user
  })})
  // eslint-disable-next-line no-shadow
  @JsonDeserialize({using: (user: any, context) => {
    delete user.otherInfo;
    return user;
  }})
  class User {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    firstname: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    lastname: string;
    constructor(id: number, email: string, firstname: string, lastname: string) {
      this.id = id;
      this.email = email;
      this.firstname = firstname;
      this.lastname = lastname;
    }
  }
  const user = new User(1, 'john.alfa@gmail.com', 'John', 'Alfa');
  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<User>(user);
  // The injected "otherInfo" must appear in the serialized output.
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"otherInfo":"other info","id":1,"email":"john.alfa@gmail.com","firstname":"John","lastname":"Alfa"}'));
  const userParsed = objectMapper.parse<User>(jsonData, {mainCreator: () => [User]});
  t.assert(userParsed instanceof User);
  t.is(userParsed.id, 1);
  t.is(userParsed.email, 'john.alfa@gmail.com');
  t.is(userParsed.firstname, 'John');
  t.is(userParsed.lastname, 'Alfa');
  // The deserializer deleted "otherInfo", so it must not survive the parse.
  t.assert(!Object.hasOwnProperty.call(userParsed, 'otherInfo'));
});
// Property-level (de)serialization: a custom serializer on Writer.books
// breaks the Book<->Writer cycle by nulling out each book's writer, and a
// Date property is serialized to a structured object and parsed back.
test('@JsonSerialize and @JsonDeserialize at property level', t => {
  // Nulls the back-reference so stringify doesn't recurse Writer -> Book -> Writer.
  const customBookListSerializer = (books: Book[], context) =>
    // eslint-disable-next-line no-shadow
    books.map((book) => new Book(book.id, book.name, book.date, null));
  class DateSerializer {
    // Date -> {year, month, day, formatted} (month is 1-based here).
    static serializeDate(date, context): any {
      return {
        year: date.getFullYear(),
        month: date.getMonth() + 1,
        day: date.getDate(),
        formatted: date.toLocaleDateString()
      };
    }
    // Rebuilds the Date from the locale-formatted string only.
    static deserializeDate(dateObj, context): Date {
      return new Date(dateObj.formatted);
    }
  }
  class Book {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonSerialize({using: DateSerializer.serializeDate})
    @JsonDeserialize({using: DateSerializer.deserializeDate})
    @JsonClassType({type: () => [Date]})
    date: Date;
    @JsonProperty()
    @JsonClassType({type: () => [Writer]})
    writer: Writer;
    // eslint-disable-next-line no-shadow
    constructor(id: number, name: string, date: Date, @JsonClassType({type: () => [Writer]}) writer: Writer) {
      this.id = id;
      this.name = name;
      this.date = date;
      this.writer = writer;
    }
  }
  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Array, [Book]]})
    @JsonSerialize({using: customBookListSerializer})
    books: Book[] = [];
    constructor(id: number, name: string) {
      this.id = id;
      this.name = name;
    }
  }
  const writer = new Writer(1, 'George R. R. Martin');
  const book = new Book(1, 'Game Of Thrones', new Date(Date.UTC(2012, 11, 4)), writer);
  writer.books.push(book);
  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Writer>(writer);
  // NOTE(review): "formatted":"12/4/2012" comes from toLocaleDateString(),
  // so this expectation assumes an en-US locale on the test machine — confirm CI locale.
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"id":1,"name":"George R. R. Martin","books":[{"id":1,"name":"Game Of Thrones","writer":null,"date":{"year":2012,"month":12,"day":4,"formatted":"12/4/2012"}}]}'));
  const writerParsed = objectMapper.parse<Writer>(jsonData, {mainCreator: () => [Writer]});
  t.assert(writerParsed instanceof Writer);
  t.assert(writerParsed.books.length === 1);
  t.assert(writerParsed.books[0] instanceof Book);
  t.assert(writerParsed.books[0].date instanceof Date);
});
// Method-level (de)serialization: @JsonSerialize on getters and
// @JsonDeserialize on setters, instead of decorating the backing fields.
test('@JsonSerialize and @JsonDeserialize at method level', t => {
  // Copies each Book without its writer to break the Book<->Writer cycle.
  const customBookListSerializer = (books: Book[], context) =>
    // eslint-disable-next-line no-shadow
    books.map((book) => {
      const bookWithoutWriter = new Book();
      bookWithoutWriter.id = book.id;
      bookWithoutWriter.name = book.name;
      bookWithoutWriter.date = book.date;
      bookWithoutWriter.writer = null;
      return bookWithoutWriter;
    });
  class DateSerializer {
    // Date -> {year, month, day, formatted} (month is 1-based here).
    static serializeDate(date, context): any {
      return {
        year: date.getFullYear(),
        month: date.getMonth() + 1,
        day: date.getDate(),
        formatted: date.toLocaleDateString()
      };
    }
    static deserializeDate(dateObj, context): Date {
      return new Date(dateObj.formatted);
    }
  }
  class Book {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Date]})
    date: Date;
    @JsonProperty()
    @JsonClassType({type: () => [Writer]})
    writer: Writer;
    // Serializer attached to the getter — applied when "date" is written out.
    @JsonProperty()
    @JsonSerialize({using: DateSerializer.serializeDate})
    @JsonClassType({type: () => [Date]})
    getDate(): Date {
      return this.date;
    }
    // Deserializer attached to the setter — applied when "date" is read back in.
    @JsonProperty()
    @JsonDeserialize({using: DateSerializer.deserializeDate})
    setDate(@JsonClassType({type: () => [Date]}) date: Date) {
      this.date = date;
    }
  }
  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Array, [Book]]})
    books: Book[] = [];
    constructor(id: number, name: string) {
      this.id = id;
      this.name = name;
    }
    @JsonProperty()
    @JsonClassType({type: () => [Array, [Book]]})
    @JsonSerialize({using: customBookListSerializer})
    getBooks(): Book[] {
      return this.books;
    }
  }
  const writer = new Writer(1, 'George R. R. Martin');
  const book = new Book();
  book.id = 1;
  book.name = 'Game Of Thrones';
  book.date = new Date(Date.UTC(2012, 11, 4));
  book.writer = writer;
  writer.books.push(book);
  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Writer>(writer);
  // NOTE(review): "formatted":"12/4/2012" assumes an en-US locale — confirm CI locale.
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"id":1,"name":"George R. R. Martin","books":[{"id":1,"name":"Game Of Thrones","writer":null,"date":{"year":2012,"month":12,"day":4,"formatted":"12/4/2012"}}]}'));
  const writerParsed = objectMapper.parse<Writer>(jsonData, {mainCreator: () => [Writer]});
  t.assert(writerParsed instanceof Writer);
  t.assert(writerParsed.books.length === 1);
  t.assert(writerParsed.books[0] instanceof Book);
  t.assert(writerParsed.books[0].date instanceof Date);
});
// Parameter-level deserialization: Person's class-level serializer injects
// "otherInfo", and a @JsonDeserialize on Company's constructor parameter
// strips it while the Company is being instantiated from JSON.
test('@JsonDeserialize at parameter level', t => {
  // Person is referenced before its declaration — safe because
  // @JsonClassType takes a lazy `() => [Person]` thunk.
  class Company {
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Person]})
    ceo: Person;
    constructor(name: string,
      @JsonDeserialize({using: (person: any, context) => {
        delete person.otherInfo;
        return person;
        // eslint-disable-next-line no-shadow
      }}) @JsonClassType({type: () => [Person]}) ceo: Person) {
      this.name = name;
      this.ceo = ceo;
    }
  }
  @JsonSerialize({using: (person: Person, context) => ({
    otherInfo: 'other info',
    ...person
  })})
  class Person {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    email: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    firstname: string;
    @JsonProperty() @JsonClassType({type: () => [String]})
    lastname: string;
    constructor(id: number, email: string, firstname: string, lastname: string) {
      this.id = id;
      this.email = email;
      this.firstname = firstname;
      this.lastname = lastname;
    }
  }
  const ceo = new Person(1, 'john.alfa@gmail.com', 'John', 'Alfa');
  const company = new Company('Google', ceo);
  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Company>(company);
  // The nested Person carries the injected "otherInfo" in the JSON.
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"name":"Google","ceo":{"otherInfo":"other info","id":1,"email":"john.alfa@gmail.com","firstname":"John","lastname":"Alfa"}}'));
  const companyParsed = objectMapper.parse<Company>(jsonData, {mainCreator: () => [Company]});
  t.assert(companyParsed instanceof Company);
  t.is(companyParsed.name, 'Google');
  t.assert(companyParsed.ceo instanceof Person);
  t.is(companyParsed.ceo.id, 1);
  t.is(companyParsed.ceo.email, 'john.alfa@gmail.com');
  t.is(companyParsed.ceo.firstname, 'John');
  t.is(companyParsed.ceo.lastname, 'Alfa');
  // Stripped by the parameter-level deserializer during construction.
  t.assert(!Object.hasOwnProperty.call(companyParsed.ceo, 'otherInfo'));
});
// Mapper-level custom (de)serializers: handlers registered on the
// ObjectMapper's default contexts apply to every Book value, including
// null array entries (which must pass through untouched).
test('Custom serializers and deserializers', t => {
  class Book {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Date]})
    date: Date;
    @JsonProperty()
    @JsonClassType({type: () => [Writer]})
    writer: Writer;
    // eslint-disable-next-line no-shadow
    constructor(id: number, name: string, date: Date, writer: Writer) {
      this.id = id;
      this.name = name;
      this.date = date;
      this.writer = writer;
    }
  }
  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Array, [Book]]})
    books: Book[] = [];
    constructor(id: number, name: string) {
      this.id = id;
      this.name = name;
    }
  }
  const writer = new Writer(1, 'George R. R. Martin');
  const book = new Book(1, 'Game Of Thrones', new Date(Date.UTC(2012, 11, 4)), writer);
  writer.books.push(book);
  // Null entries exercise the serializer/deserializer pass-through branch.
  writer.books.push(null);
  writer.books.push(null);
  const objectMapper = new ObjectMapper();
  // Serializer for every value whose declared type is Book: expands the
  // date into a structured object and drops the writer back-reference.
  objectMapper.defaultStringifierContext.serializers.push({
    mapper: (key, value: Book, context) => {
      if (value != null) {
        return {
          id: value.id,
          name: value.name,
          date: {
            year: value.date.getFullYear(),
            month: value.date.getMonth() + 1,
            day: value.date.getDate(),
            formatted: value.date.toLocaleDateString()
          },
          writer: null
        };
      }
      return value;
    },
    type: () => Book
  });
  // Matching deserializer: rebuilds Book instances from the custom shape.
  objectMapper.defaultParserContext.deserializers.push({
    mapper: (key, value: any, context) => {
      if (value != null) {
        return new Book(value.id, value.name, new Date(value.date.formatted), value.writer);
      }
      return value;
    },
    type: () => Book
  });
  const jsonData = objectMapper.stringify<Writer>(writer);
  // NOTE(review): "formatted":"12/4/2012" assumes an en-US locale — confirm CI locale.
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"books":[{"id":1,"name":"Game Of Thrones","date":{"year":2012,"month":12,"day":4,"formatted":"12/4/2012"},"writer":null},null,null],"id":1,"name":"George R. R. Martin"}'));
  const writerParsed = objectMapper.parse<Writer>(jsonData, {mainCreator: () => [Writer]});
  t.assert(writerParsed instanceof Writer);
  t.assert(writerParsed.books.length === 3);
  t.assert(writerParsed.books[0] instanceof Book);
  t.assert(writerParsed.books[0].date instanceof Date);
  t.assert(writerParsed.books[1] === null);
  t.assert(writerParsed.books[2] === null);
});
// contentUsing transforms each element of a container property;
// keyUsing renames Map / object-literal keys on the way out and back.
test('@JsonSerialize and @JsonDeserialize at property level with contentUsing and keyUsing option values', t => {
  class Book {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Date]})
    date: Date;
    @JsonProperty()
    @JsonClassType({type: () => [Writer]})
    writer: Writer;
    // eslint-disable-next-line no-shadow
    constructor(id: number, name: string, date: Date, writer: Writer) {
      this.id = id;
      this.name = name;
      this.date = date;
      this.writer = writer;
    }
  }
  class Writer {
    @JsonProperty() @JsonClassType({type: () => [Number]})
    id: number;
    @JsonProperty() @JsonClassType({type: () => [String]})
    name: string;
    @JsonProperty()
    @JsonClassType({type: () => [Array, [Book]]})
    @JsonSerialize({
      // NOTE(review): this mutates the live Book instance during
      // serialization (adds writerName, nulls writer) — confirm intended.
      // eslint-disable-next-line no-shadow
      contentUsing: (book: Book, context) => {
        // @ts-ignore
        book.writerName = book.writer.name;
        book.writer = null;
        return book;
      }
    })
    @JsonDeserialize({
      // Drops the synthetic writerName field when parsing back.
      // eslint-disable-next-line no-shadow
      contentUsing: (book: any, context) => {
        delete book.writerName;
        return book;
      }
    })
    books: Book[] = [];
    @JsonProperty()
    @JsonClassType({type: () => [Map, [String, String]]})
    @JsonSerialize({
      keyUsing: (key: string, context) => 'newMapKey-' + key,
      contentUsing: (obj: string, context) => 'newMapValue: ' + obj
    })
    @JsonDeserialize({
      keyUsing: (key: string, context) => key.replace('newMapKey-', ''),
      contentUsing: (obj: string, context) => obj.replace('newMapValue: ', '')
    })
    otherInfoMap: Map<string, string> = new Map();
    @JsonProperty()
    @JsonClassType({type: () => [Object, [String, String]]})
    @JsonSerialize({
      keyUsing: (key: string, context) => 'newObjKey-' + key,
      contentUsing: (obj: string, context) => 'newObjValue: ' + obj
    })
    @JsonDeserialize({
      keyUsing: (key: string, context) => key.replace('newObjKey-', ''),
      contentUsing: (obj: string, context) => obj.replace('newObjValue: ', '')
    })
    otherInfoObjLiteral: {phone?: string; address?: string} = {};
    constructor(id: number, name: string) {
      this.id = id;
      this.name = name;
    }
  }
  const writer = new Writer(1, 'George R. R. Martin');
  writer.otherInfoMap.set('phone', '+393333111999');
  writer.otherInfoMap.set('address', '123 Main Street, New York, NY 10030');
  writer.otherInfoObjLiteral = {
    address: '123 Main Street, New York, NY 10030',
    phone: '+393333111999'
  };
  const book = new Book(1, 'Game Of Thrones', new Date(Date.UTC(2012, 11, 4)), writer);
  writer.books.push(book);
  const objectMapper = new ObjectMapper();
  const jsonData = objectMapper.stringify<Writer>(writer);
  // No custom date serializer here, so "date" is the epoch-millis number.
  // eslint-disable-next-line max-len
  t.deepEqual(JSON.parse(jsonData), JSON.parse('{"books":[{"id":1,"name":"Game Of Thrones","date":1354579200000,"writer":null,"writerName":"George R. R. Martin"}],"otherInfoMap":{"newMapKey-phone":"newMapValue: +393333111999","newMapKey-address":"newMapValue: 123 Main Street, New York, NY 10030"},"otherInfoObjLiteral":{"newObjKey-address":"newObjValue: 123 Main Street, New York, NY 10030","newObjKey-phone":"newObjValue: +393333111999"},"id":1,"name":"George R. R. Martin"}'));
  const writerParsed = objectMapper.parse<Writer>(jsonData, {mainCreator: () => [Writer]});
  t.assert(writerParsed instanceof Writer);
  t.is(writerParsed.id, 1);
  t.is(writerParsed.name, 'George R. R. Martin');
  // Map keys/values must come back with the prefixes removed.
  t.assert(writerParsed.otherInfoMap instanceof Map);
  t.is(writerParsed.otherInfoMap.get('phone'), '+393333111999');
  t.is(writerParsed.otherInfoMap.get('address'), '123 Main Street, New York, NY 10030');
  t.is(writerParsed.otherInfoObjLiteral.phone, '+393333111999');
  t.is(writerParsed.otherInfoObjLiteral.address, '123 Main Street, New York, NY 10030');
  t.assert(writerParsed.books.length === 1);
  t.assert(writerParsed.books[0] instanceof Book);
  t.assert(writerParsed.books[0].date instanceof Date);
  t.is(writerParsed.books[0].id, 1);
  t.is(writerParsed.books[0].name, 'Game Of Thrones');
  t.deepEqual(writerParsed.books[0].date, new Date(Date.UTC(2012, 11, 4)));
  t.is(writerParsed.books[0].writer, null);
});
import { Container, Inject, Service } from "typedi";
import { DBManager } from "@modules/db";
import { ProjectsService } from "@modules/resources/projects/service";
import { TestsRunner } from "@modules/runner";
import { BuildStatusEnum, BuildTriggerEnum, ICreateBuildRequestPayload } from "@modules/resources/builds/interface";
import { PLATFORM } from "@crusher-shared/types/platform";
import { ICreateTemplatePayload, ICreateTestPayload, ITemplatesTable, ITestTable } from "@modules/resources/tests/interface";
import { getSnakedObject, isOpenSourceEdition } from "@utils/helper";
import { iAction } from "@crusher-shared/types/action";
import { RedisManager } from "@modules/redis";
import { v4 as uuidv4 } from "uuid";
import { CamelizeResponse } from "@modules/decorators/camelizeResponse";
import { KeysToCamelCase } from "@modules/common/typescript/interface";
import { BrowserEnum } from "@modules/runner/interface";
import { BuildReportStatusEnum } from "../buildReports/interface";
import { BadRequestError } from "routing-controllers";
import { merge } from "lodash";
import { ActionsInTestEnum } from "@crusher-shared/constants/recordedActions";
@Service()
class TestService {
private dbManager: DBManager;
private redisManager: RedisManager;
@Inject()
private projectService: ProjectsService;
@Inject()
private testsRunner: TestsRunner;
constructor() {
this.dbManager = Container.get(DBManager);
this.redisManager = Container.get(RedisManager);
}
async saveTempTest(events: Array<iAction>): Promise<{ insertId: string }> {
const keyId = `temp_test_${uuidv4()}`;
await this.redisManager.set(keyId, JSON.stringify(events), { expiry: { type: "s", value: 10 * 60 } });
return { insertId: keyId };
}
async getTempTest(tempTestId): Promise<{ events: Array<iAction> }> {
const result = await this.redisManager.get(tempTestId);
return { events: JSON.parse(result) };
}
// Modifies the events actions object directly
private async handleTemplateActions(templateActions: Array<iAction>, projectId: number, userId: number) {
const promiseArr = [];
for (const templateAction of templateActions) {
if (templateAction.payload.meta && templateAction.payload.meta.id) {
// Do nothing
continue;
}
promiseArr.push(
this.createTemplate({ name: templateAction.name, events: templateAction.payload.meta.actions, projectId, userId }).then((insertRecord) => {
templateAction.payload.meta.id = insertRecord.insertId;
delete templateAction.payload.meta.actions;
return true;
}),
);
}
return Promise.all(promiseArr);
}
async createTest(testInfo: Omit<ICreateTestPayload, "events"> & { events: Array<iAction> }): Promise<{ insertId: number }> {
const templateActions = testInfo.events.filter((event) => event.type === ActionsInTestEnum.RUN_TEMPLATE);
await this.handleTemplateActions(templateActions, testInfo.projectId, testInfo.userId);
return this.dbManager.insert(
`INSERT INTO public.tests (project_id, name, events, user_id, featured_video_url, featured_screenshot_url) VALUES (?, ?, ?, ?, ?, ?)`,
[
testInfo.projectId,
testInfo.name,
JSON.stringify(testInfo.events),
testInfo.userId,
testInfo.featuredVideoUrl ? testInfo.featuredVideoUrl : null,
testInfo.featuredScreenshotUrl ? testInfo.featuredScreenshotUrl : null,
],
);
}
async updateTestSteps(testId: number, steps: Array<iAction>) {
return this.dbManager.update(`UPDATE public.tests SET events = ? WHERE id = ?`, [JSON.stringify(steps), testId]);
}
async linkToDraftBuild(buildId: number, testId: number) {
return this.dbManager.update("UPDATE public.tests SET draft_job_id = ? WHERE id = ?", [buildId, testId]);
}
async updateTest(testId: number, newInfo: { name: string; tags: string; runAfter: number }) {
const { name, tags, runAfter } = newInfo;
return this.dbManager.update(`UPDATE public.tests SET name = ?, tags = ?, run_after = ? WHERE id = ?`, [name, tags || "", runAfter, testId]);
}
async runTestsInProject(
projectId: number,
userId: number,
customTestsConfig: Partial<ICreateBuildRequestPayload> = {},
buildMeta: { github?: { repoName: string; commitId: string }; disableBaseLineComparisions?: boolean } = {},
overideBaseLineBuildId: number | null = null,
browsers = [BrowserEnum.CHROME],
) {
const testsData = await this.getTestsInProject(projectId, true);
if (!testsData.list.length) throw new BadRequestError("No tests available to run");
const projectRecord = await this.projectService.getProject(projectId);
const meta: { isProjectLevelBuild: boolean; github?: { repoName: string }; disableBaseLineComparisions?: boolean } = {
isProjectLevelBuild: true,
disableBaseLineComparisions: !!buildMeta.disableBaseLineComparisions,
};
if (buildMeta.github) {
meta.github = buildMeta.github;
}
return this.testsRunner.runTests(
await this.getFullTestArr(testsData.list),
merge(
{
userId: userId,
projectId: projectId,
host: "null",
status: BuildStatusEnum.CREATED,
buildTrigger: BuildTriggerEnum.MANUAL,
browser: browsers,
isDraftJob: false,
config: { shouldRecordVideo: true, testIds: testsData.list.map((test) => test.id) },
meta: meta,
},
customTestsConfig,
),
overideBaseLineBuildId ? overideBaseLineBuildId : projectRecord.baselineJobId,
);
}
@CamelizeResponse()
async getCompleteTestInfo(testId: number) {
return this.dbManager.fetchSingleRow(
`SELECT tests.*, projects.id as project_id, projects.name as project_name, users.id as user_id, users.name as user_name FROM public.tests, public.projects, public.users WHERE tests.id = ? AND tests.project_id = projects.id AND users.id=tests.user_id`,
[testId],
);
}
@CamelizeResponse()
private _runCamelizeFetchAllQuery(query, values) {
return this.dbManager.fetchAllRows(query, values);
}
async getTestsInProject(projectId: number, findOnlyActiveTests = false, filter: { search?: string; status?: BuildReportStatusEnum; page?: number } = {}) {
const PER_PAGE_LIMIT = 15;
let additionalSelectColumns = "";
let additionalFromSource = "";
const queryParams: Array<any> = [];
if (filter.search) {
additionalSelectColumns += "ts_rank_cd(to_tsvector(COALESCE(commit_name, '')), query) as rank";
additionalFromSource += `to_tsquery(?) query`;
queryParams.push(filter.search);
}
let query = `SELECT tests.*, tests.draft_job_id as draft_job_id, tests.featured_clip_video_url as featured_clip_video_url, tests.featured_video_url as featured_video_url, users.id as user_id, users.name as user_name, jobs.status as draft_build_status, job_reports.status as draft_build_report_status ${
additionalSelectColumns ? `, ${additionalSelectColumns}` : ""
} FROM public.tests, public.users, public.jobs, public.job_reports ${
additionalFromSource ? `, ${additionalFromSource}` : ""
} WHERE tests.project_id = ? AND users.id = tests.user_id AND jobs.id = tests.draft_job_id AND job_reports.id = jobs.latest_report_id`;
queryParams.push(projectId);
let page = 0;
if (filter.page) page = filter.page;
if (findOnlyActiveTests) {
query += " AND tests.deleted = ?";
queryParams.push(findOnlyActiveTests ? false : true);
}
if (filter.status) {
query += " AND job_reports.status = ?";
queryParams.push(filter.status);
}
if (filter.search) {
query += ` AND to_tsvector(COALESCE(test.name, '')) @@ query`;
}
const totalRecordCountQuery = `SELECT COUNT(*) count FROM (${query}) custom_query`;
const totalRecordCountQueryResult = await this.dbManager.fetchSingleRow(totalRecordCountQuery, queryParams);
if (filter.search) {
query += " ORDER BY tests.created_at DESC, rank DESC";
} else {
query += " ORDER BY tests.created_at DESC";
}
if (filter.page && filter.page !== -1) {
query += " LIMIT ? OFFSET ?";
// Weird bug in node-mysql2
// https://github.com/sidorares/node-mysql2/issues/1239#issuecomment-760086130
queryParams.push(`${PER_PAGE_LIMIT}`);
queryParams.push(`${filter.page * PER_PAGE_LIMIT}`);
}
return { totalPages: Math.ceil(totalRecordCountQueryResult.count / PER_PAGE_LIMIT), list: await this._runCamelizeFetchAllQuery(query, queryParams) };
}
async deleteTest(testId: number) {
return this.dbManager.update(`UPDATE public.tests SET deleted = ? WHERE id = ?`, [true, testId]);
}
async updateMeta(meta: string, testId: number) {
return this.dbManager.update("UPDATE public.tests SET meta = ? WHERE id = ?", [meta, testId]);
}
@CamelizeResponse()
async getTest(testId: number): Promise<KeysToCamelCase<ITestTable>> {
return this.dbManager.fetchSingleRow("SELECT * FROM public.tests WHERE id = ?", [testId]);
}
// With template actions included
@CamelizeResponse()
async getFullTest(testRecord: KeysToCamelCase<ITestTable>): Promise<KeysToCamelCase<ITestTable>> {
const actions = JSON.parse(testRecord.events);
const templateActions = actions.filter((action) => action.type === ActionsInTestEnum.RUN_TEMPLATE);
await Promise.all(
templateActions.map(async (action) => {
if (action.payload.meta.id) {
const template = await this.getTemplate(action.payload.meta.id);
action.payload.meta.actions = JSON.parse(template.events);
}
}),
);
testRecord.events = JSON.stringify(actions);
return testRecord;
}
async getFullTestArr(testRecords: Array<KeysToCamelCase<ITestTable>>): Promise<Array<KeysToCamelCase<ITestTable>>> {
return Promise.all(testRecords.map((testRecord) => this.getFullTest(testRecord)));
}
async addFeaturedVideo(featuredVideoUrl: string, lastSecondsClipVideoUrl: string, testId: number): Promise<{ insertId: number }> {
return this.dbManager.update("UPDATE public.tests SET featured_video_url = ?, featured_clip_video_url = ? WHERE id = ?", [
featuredVideoUrl,
lastSecondsClipVideoUrl,
testId,
]);
}
@CamelizeResponse()
async getTestsFromIdList(testIds: Array<number>): Promise<Array<KeysToCamelCase<ITestTable>>> {
return this.dbManager.fetchAllRows(`SELECT * FROM public.tests WHERE id IN (${new Array(testIds.length).fill("?").join(", ")})`, [...testIds]);
}
// Specifically for run after this test
async getCompleteTestsArray(tests: Array<KeysToCamelCase<ITestTable>>): Promise<Array<KeysToCamelCase<ITestTable>>> {
const testsMap = tests.reduce((acc, test) => {
return { ...acc, [test.id]: test };
}, {});
for (const test of tests) {
const events = JSON.parse(test.events);
const runAfterTestAction = events.find((event) => event.type === ActionsInTestEnum.RUN_AFTER_TEST);
if (runAfterTestAction) {
const runAfterTestId = runAfterTestAction.payload.meta.value;
if (!testsMap[runAfterTestId]) {
testsMap[runAfterTestId] = await this.getTest(parseInt(runAfterTestId));
}
}
}
return Object.values(testsMap);
}
async createTemplate(payload: Omit<ICreateTemplatePayload, "events"> & { events: Array<iAction> }) {
return this.dbManager.insert("INSERT INTO public.templates (name, events, project_id, user_id) VALUES (?, ?, ?, ?)", [
payload.name,
JSON.stringify(payload.events),
payload.projectId ? payload.projectId : null,
payload.userId ? payload.userId : null,
]);
}
@CamelizeResponse()
async getTemplates(name: string): Promise<Array<KeysToCamelCase<ITemplatesTable>>> {
return this.dbManager.fetchAllRows(`SELECT * FROM public.templates WHERE name LIKE ?`, [name ? `%${name}%` : "%"]);
}
@CamelizeResponse()
async getTemplate(id: number): Promise<KeysToCamelCase<ITemplatesTable>> {
return this.dbManager.fetchSingleRow(`SELECT * FROM public.templates WHERE id = ?`, [id]);
}
}
export { TestService }; | the_stack |
/**
 * Normalize an attribute key: lower-case it and, when no "ns:" namespace
 * prefix is present, prepend the default "android:" namespace.
 */
function fixDefaultNamespaceAndLowerCase(key) {
    const lowered = key.toLowerCase();
    return lowered.includes(':') ? lowered : 'android:' + lowered;
}
module androidui.attr {
import View = android.view.View;
import ViewGroup = android.view.ViewGroup;
import Gravity = android.view.Gravity;
import Drawable = android.graphics.drawable.Drawable;
import ColorDrawable = android.graphics.drawable.ColorDrawable;
import Color = android.graphics.Color;
import ColorStateList = android.content.res.ColorStateList;
import Resources = android.content.res.Resources;
import Context = android.content.Context;
import TypedValue = android.util.TypedValue;
export class AttrBinder {
private host:View|ViewGroup.LayoutParams;
private attrChangeMap:Map<string, (newValue:any)=>void>;
private attrStashMap:Map<string, ()=>any>;
private classAttrBindMap:AttrBinder.ClassBinderMap;
private objectRefs = [];
private mContext:Context;
constructor(host:View|ViewGroup.LayoutParams){
this.host = host;
}
setClassAttrBind(classAttrBind:AttrBinder.ClassBinderMap):void {
if (classAttrBind) {
this.classAttrBindMap = classAttrBind;
}
}
addAttr(attrName:string, onAttrChange:(newValue:any)=>void, stashAttrValueWhenStateChange?:()=>any):void {
if(!attrName) return;
attrName = fixDefaultNamespaceAndLowerCase(attrName);
if(onAttrChange){
if (!this.attrChangeMap) {
this.attrChangeMap = new Map<string, (newValue:any)=>void>();
}
this.attrChangeMap.set(attrName, onAttrChange);
}
if(stashAttrValueWhenStateChange) {
this.attrStashMap = new Map<string, ()=>any>();
this.attrStashMap.set(attrName, stashAttrValueWhenStateChange);
}
}
onAttrChange(attrName:string, attrValue:any, context:Context):void {
this.mContext = context;
if(!attrName) return;
attrName = fixDefaultNamespaceAndLowerCase(attrName);
let onAttrChangeCall = this.attrChangeMap && this.attrChangeMap.get(attrName);
if(onAttrChangeCall) {
onAttrChangeCall.call(this.host, attrValue, this.host);
}
if(this.classAttrBindMap) {
this.classAttrBindMap.callSetter(attrName, this.host, attrValue, this);
}
}
/**
* @returns {string} undefined if not set get callback on addAttr
*/
getAttrValue(attrName:string):string {
if(!attrName) return undefined;
attrName = fixDefaultNamespaceAndLowerCase(attrName);
let getAttrCall = this.attrStashMap && this.attrStashMap.get(attrName);
let value;
if(getAttrCall){
value = getAttrCall.call(this.host);
} else if (this.classAttrBindMap) {
value = this.classAttrBindMap.callGetter(attrName, this.host);
}
if(value == null) return null;
if(typeof value === "number" || typeof value === "boolean" || typeof value === "string") return value+'';
return this.setRefObject(value);
}
private getRefObject(ref:string):any{
if(ref && ref.startsWith('@ref/')){
ref = ref.substring('@ref/'.length);
let index = Number.parseInt(ref);
if(Number.isInteger(index)){
return this.objectRefs[index];
}
}
}
private setRefObject(obj:any):string{
let index = this.objectRefs.indexOf(obj);
if(index>=0) return '@ref/'+index;
this.objectRefs.push(obj);
return '@ref/'+(this.objectRefs.length-1);
}
/**
* @param value
* @returns {[top, right, bottom, left]}
*/
parsePaddingMarginTRBL(value):number[]{
value = (value + '');
let parts = [];
for(let part of value.split(' ')){
if(part) parts.push(part);
}
let trbl: Array<string>;
switch (parts.length){
case 1 : trbl = [parts[0], parts[0], parts[0], parts[0]]; break;
case 2 : trbl = [parts[0], parts[1], parts[0], parts[1]]; break;
case 3 : trbl = [parts[0], parts[1], parts[2], parts[1]]; break;
case 4 : trbl = [parts[0], parts[1], parts[2], parts[3]]; break;
}
if (trbl) {
return trbl.map((v) => this.parseDimension(v));
}
throw Error('not a padding or margin value : '+value);
}
parseEnum(value, enumMap:Map<string,number>, defaultValue:number):number {
if(Number.isInteger(value)){
return value;
}
if(enumMap.has(value)){
return enumMap.get(value);
}
return defaultValue;
}
parseBoolean(value, defaultValue = true):boolean{
if(value===false) return false;
else if(value===true) return true;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
if (typeof value === "string") {
return AttrValueParser.parseBoolean(res, value, defaultValue);
}
return defaultValue;
}
parseGravity(s:string, defaultValue=Gravity.NO_GRAVITY):number {
let gravity = Number.parseInt(s);
if(Number.isInteger(gravity)) return gravity;
return Gravity.parseGravity(s, defaultValue);
}
parseDrawable(s:string):Drawable{
if(!s) return null;
if((<any>s) instanceof Drawable) return <Drawable><any>s;
if(s.startsWith('@ref/')){
let refObj = this.getRefObject(s);
if(refObj) return refObj;
}
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
s = (s + '').trim();
return AttrValueParser.parseDrawable(res, s);
}
parseColor(value:string, defaultValue?:number):number{
let color = Number.parseInt(value);
if(Number.isInteger(color)) return color;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
color = AttrValueParser.parseColor(res, value, defaultValue);
if(isNaN(color)){
return Color.BLACK;
}
return color;
}
parseColorList(value:string):ColorStateList{
if(!value) return null;
if((<any>value) instanceof ColorStateList) return <ColorStateList><any>value;
if(typeof value == 'number') return ColorStateList.valueOf(<number><any>value);
if(value.startsWith('@ref/')){
let refObj = this.getRefObject(value);
if(refObj) return refObj;
}
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseColorStateList(res, value);
}
parseInt(value, defaultValue = 0):number{
if(typeof value == 'number') return <number><any>value;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseInt(res, value, defaultValue);
}
parseFloat(value, defaultValue = 0):number{
if(typeof value == 'number') return <number><any>value;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseFloat(res, value, defaultValue);
}
parseDimension(value, defaultValue = 0, baseValue = 0):number{
if(typeof value == 'number') return <number><any>value;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseDimension(res, value, defaultValue, baseValue);
}
parseNumberPixelOffset(value, defaultValue = 0, baseValue = 0):number{
if(typeof value == 'number') return <number><any>value;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseDimensionPixelOffset(res, value, defaultValue, baseValue);
}
parseNumberPixelSize(value, defaultValue = 0, baseValue = 0):number{
if(typeof value == 'number') return <number><any>value;
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseDimensionPixelSize(res, value, defaultValue, baseValue);
}
parseString(value, defaultValue?:string):string{
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
if(typeof value === 'string') {
return AttrValueParser.parseString(res, value, defaultValue);
}
return defaultValue;
}
parseStringArray(value):string[] {
if(typeof value === 'string') {
if(value.startsWith('@ref/')){
let refObj = this.getRefObject(value);
if(refObj) return refObj;
}
let res = this.mContext ? this.mContext.getResources() : Resources.getSystem();
return AttrValueParser.parseTextArray(res, value);
}
return null;
}
}
export module AttrBinder {
export class ClassBinderMap {
binderMap:Map<string, ClassBinderValue>;
constructor(copyBinderMap?: Map<string, androidui.attr.AttrBinder.ClassBinderValue>) {
this.binderMap = new Map<string, ClassBinderValue>(copyBinderMap);
}
set(key:string, value?:androidui.attr.AttrBinder.ClassBinderValue):ClassBinderMap {
key = fixDefaultNamespaceAndLowerCase(key);
this.binderMap.set(key, value);
return this;
}
get(key:string):androidui.attr.AttrBinder.ClassBinderValue {
key = fixDefaultNamespaceAndLowerCase(key);
return this.binderMap.get(key);
}
private callSetter(attrName:string, host:android.view.View|android.view.ViewGroup.LayoutParams, attrValue:any, attrBinder:AttrBinder):void {
if (!attrName) return;
let value = this.get(attrName);
if (value) {
value.setter.call(host, host, attrValue, attrBinder);
}
}
private callGetter(attrName:string, host:android.view.View|android.view.ViewGroup.LayoutParams): any {
if (!attrName) return;
let value = this.get(attrName);
if (value) {
return value.getter.call(host, host);
}
}
}
export interface ClassBinderValue {
setter:(host:android.view.View|android.view.ViewGroup.LayoutParams, attrValue:any, attrBinder:AttrBinder) => void;
getter?:(host:android.view.View|android.view.ViewGroup.LayoutParams) => any;
}
}
    }
namespace PE {
export class Battle_Scene extends Scene_Base {
battle: Battle_Manager;
viewport: Sprite;
sprites: {};
hpbars: {};
layers: { bg: Sprite } = { bg: undefined };
message: PE.Battle.UI.Window_BattleMessage;
battleCommands: PE.Battle.UI.BattleCommands;
partyBar: Sprite;
HUD: Sprite;
_weather: SpriteWeather;
constructor() {
super();
let p1 = [];
let p2 = [];
p1.push(new Pokemon.Pokemon(POKEDEX.ABOMASNOW, 100));
p1.push(new Pokemon.Pokemon(POKEDEX.KYOGRE, 100));
for (let index = 0; index < 3; index++) {
p1.push(PE.Pokemon.getRandomPokemon(100));
p2.push(PE.Pokemon.getRandomPokemon(100));
}
$BattleManager.init(p1, p2);
}
create() {
super.create();
this.viewport = new Sprite(new Bitmap(Graphics.width, Graphics.height));
this.addChild(this.viewport);
this.createBackground();
this.createWeather();
this.HUD = new Sprite(new Bitmap(Graphics.width, Graphics.height));
this.addChild(this.HUD);
this.createMessageWindow();
this.createUI();
}
createBackground() {
this.layers.bg = new Sprite(new Bitmap(Graphics.width, Graphics.height));
this.layers.bg.bitmap = ImageManager.loadBitmap("img/battlebacks/", "bg-forest");
this.layers.bg.x = Graphics.width / 2;
this.layers.bg.y = Graphics.height;
this.layers.bg.anchor.x = 0.5;
this.layers.bg.anchor.y = 1;
this.viewport.addChild(this.layers.bg);
this.sprites = {};
this.hpbars = {};
this.partyBar = new Sprite();
}
createMessageWindow() {
this.createWindowLayer();
this.message = new PE.Battle.UI.Window_BattleMessage();
this.addWindow(this.message);
this.message.subWindows().forEach(function(window) {
this.addWindow(window);
}, this);
}
createBattlers() {
for (const battler of $BattleManager.sides.foe.actives) {
if (this.sprites[battler.guid]) {
if (!this.sprites[battler.guid].visible) {
this.viewport.addChild(this.sprites[battler.guid]);
this.sprites[battler.guid].visible = true;
}
} else {
let fx = Graphics.width - 128;
let fy = 240;
this.sprites[battler.guid] = new PE.Sprites.Battler(battler.pokemon, PE.Sprites.BattlersFacing.Front);
this.sprites[battler.guid].x = fx;
this.sprites[battler.guid].y = fy;
this.sprites[battler.guid].scale.x = 2;
this.sprites[battler.guid].scale.y = 2;
this.sprites[battler.guid].anchor.x = 0.5;
this.sprites[battler.guid].anchor.y = 1;
this.viewport.addChild(this.sprites[battler.guid]);
}
if (this.hpbars[battler.guid]) {
if (!this.hpbars[battler.guid].visible) {
this.hpbars[battler.guid].visible = true;
this.viewport.addChild(this.hpbars[battler.guid]);
}
} else {
this.hpbars[battler.guid] = new nHPBar(battler, Graphics.width - 208, 48, true);
this.HUD.addChild(this.hpbars[battler.guid]);
}
}
for (const battler of $BattleManager.sides.player.actives) {
if (this.sprites[battler.guid]) {
if (!this.sprites[battler.guid].visible) {
this.viewport.addChild(this.sprites[battler.guid]);
this.sprites[battler.guid].visible = true;
}
} else {
let x = 128;
let y = Graphics.height - 64;
let index = battler.name + "_" + battler.slotIndex;
this.sprites[battler.guid] = new PE.Sprites.Battler(battler.pokemon, PE.Sprites.BattlersFacing.Back);
this.sprites[battler.guid].x = x;
this.sprites[battler.guid].y = y;
this.sprites[battler.guid].scale.x = 3;
this.sprites[battler.guid].scale.y = 3;
this.sprites[battler.guid].anchor.x = 0.5;
this.sprites[battler.guid].anchor.y = 1;
this.viewport.addChild(this.sprites[battler.guid]);
}
if (this.hpbars[battler.guid]) {
if (!this.hpbars[battler.guid].visible) {
this.hpbars[battler.guid].visible = true;
this.partyBar.addChild(this.hpbars[battler.guid]);
}
} else {
this.hpbars[battler.guid] = new nHPBar(battler, 16, Graphics.height - 64, false);
this.partyBar.addChild(this.hpbars[battler.guid]);
}
this.HUD.addChild(this.partyBar);
// this.partyBar.visible = false;
// this.movesSelection = new PE.UI._MovesSelection(battler);
// this.viewport.addChild(this.movesSelection);
}
// this.viewport.addChild(this.sprites);
// let trainer = Math.randomInt(243) + 1;
// this.sprites["front"] = new Sprites.TrainerFront("BW_" + trainer.padZero(3));
// this.sprites["front"].x = Graphics.width - 96;
// this.sprites["front"].anchor.x = 0.5;
// this.addChild(this.sprites["front"]);
// this.sprites["back"] = new Sprites.TrainerBack();
// this.sprites["back"].y = Graphics.height;
// this.sprites["back"].anchor.y = 1;
// this.addChild(this.sprites["back"]);
// $Battle.push(() => this.sprites["back"].start(), this);
}
createWeather() {
this._weather = new SpriteWeather();
this.addChild(this._weather);
}
setWeather(weather: WEATHERS) {
this._weather.setWeather(weather);
}
createUI() {
let x = Graphics.width - 168;
let y = Graphics.height - 108;
this.battleCommands = new PE.Battle.UI.BattleCommands(x, y);
this.battleCommands.visible = false;
this.viewport.addChild(this.battleCommands);
}
start() {
EventManager.on("SWITCH_BATTLERS", this.switchBattlers, this);
EventManager.on("SET_WEATHER", this.setWeather, this);
$BattleManager.startBattle();
this.createBattlers();
}
update() {
super.update();
if ($BattleManager.phase === Battle_Phase.Input || BattleEventQueue.waitMode === WaitModes.Animation) {
this.partyBar.visible = true;
} else {
this.partyBar.visible = false;
}
if (BattleEventQueue.isBusy()) return;
if (Input.isTriggered("ok")) {
this.endActionsSelection();
return;
}
// this.battleCommands.updateInput();
$BattleManager.update();
}
endActionsSelection() {
$BattleManager.endActionsSelection();
}
switchBattlers(out: Battle_Battler, enter: Battle_Battler) {
// this.sprites.bitmap = new Bitmap(Graphics.width, Graphics.height);
this.viewport.removeChild(this.sprites[out.guid]);
this.viewport.removeChild(this.hpbars[out.guid]);
this.partyBar.removeChild(this.hpbars[out.guid]);
this.sprites[out.guid].visible = false;
this.hpbars[out.guid].visible = false;
// delete this.sprites[out.guid];
this.createBattlers();
}
}
export class nHPBar extends Sprite {
animate: boolean;
currentHP: any;
__damage: any;
indicator: Sprite;
expbar: Sprite;
expbox: Sprite;
text: Sprite;
bar: Sprite;
box: Sprite;
constructor(public battler: Battle_Battler, public _x: number, public _y: number, public foe: boolean) {
super();
this.currentHP = this.battler.hp;
// this.pokemon.hpbar = this;
this.__damage = 0;
// this.__heal = 0;
this.create();
this.animate = false;
this.setWidth();
// this.completeCallbacks = [];
}
create() {
this.box = new Sprite();
this.box.bitmap = ImageManager.loadBitmap("img/pictures/Battle/", "hp_box");
this.box.x = this._x;
this.box.y = this._y;
this.addChild(this.box);
this.bar = new Sprite();
this.bar.bitmap = ImageManager.loadBitmap("img/pictures/Battle/", "hp_bar");
this.bar.x = this._x;
this.bar.y = this._y;
this.addChild(this.bar);
this.text = new Sprite(new Bitmap(Graphics.width, Graphics.height));
this.text.x = 0;
this.text.y = 0;
this.text.bitmap.fontSize = 28;
// this.text.bitmap.outlineWidth = 4;
this.text.bitmap.textColor = "#fff";
this.text.bitmap.drawText(this.battler.name, this._x, this._y - 30, Graphics.width, 24, "left");
this.text.bitmap.fontSize = 18;
this.text.bitmap.textColor = "#ff0";
this.text.bitmap.shadowColor = "#cccc00";
var w1 = this.text.bitmap.measureTextWidth("Lv. " + this.battler.level);
this.text.bitmap.drawText("Lv. ", this._x + (192 - w1), this._y - 24, Graphics.width, 24, "left");
this.text.bitmap.textColor = "#fff";
this.text.bitmap.shadowColor = DEFAULT_SHADOW_COLOR;
var w2 = this.text.bitmap.measureTextWidth("" + this.battler.level);
this.text.bitmap.drawText("" + this.battler.level, this._x + (192 - w2), this._y - 24, Graphics.width, 24, "left");
if (this.battler.pokemon.gender !== "N") {
if (this.battler.pokemon.gender === "M") {
var w3 = this.text.bitmap.measureTextWidth("♂ ");
this.text.bitmap.textColor = "#00bdf7";
this.text.bitmap.shadowColor = "#0097c5";
this.text.bitmap.drawText("♂", this._x + (192 - w1 - w3), this._y - 24, Graphics.width, 24, "left");
} else {
var w3 = this.text.bitmap.measureTextWidth("♀ ");
this.text.bitmap.textColor = "#ff3142";
this.text.bitmap.shadowColor = "#f30014";
this.text.bitmap.drawText("♀", this._x + (192 - w1 - w3), this._y - 24, Graphics.width, 24, "left");
}
}
if (!this.foe) {
this.expbox = new Sprite();
this.expbox.bitmap = ImageManager.loadBitmap("img/pictures/Battle/", "exp_box");
this.expbox.x = this._x;
this.expbox.y = this.box.y + 26;
this.addChild(this.expbox);
this.expbar = new Sprite();
this.expbar.bitmap = ImageManager.loadBitmap("img/pictures/Battle/", "exp_bar");
this.expbar.x = this._x;
this.expbar.y = this.box.y + 26;
this.expbar.setFrame(0, 0, 62, 8);
this.addChild(this.expbar);
this.indicator = new Sprite(new Bitmap(Graphics.height, Graphics.width));
this.indicator.bitmap.textColor = "#fff";
this.indicator.bitmap.fontSize = 20;
this.indicator.bitmap.drawText(this.battler.hp + "/" + this.battler.hp, this._x + 32, this.box.y + 8, 200, 24, "left");
this.addChild(this.indicator);
}
this.addChild(this.text);
EventManager.on("DAMAGE", this.damage, this);
}
update() {
super.update();
if (this.animate && this.currentHP !== this.__damage) this.updateDamage();
}
damage(battler: Battle_Battler, hp) {
if (battler.guid !== this.battler.guid) return;
BattleEventQueue.push(() => {
this.__damage = this.currentHP - hp;
if (this.__damage <= 0) this.__damage = 0;
this.animate = true;
$BattleManager.wait(WaitModes.Animation);
}, this);
}
updateDamage() {
this.currentHP--;
if (this.indicator) {
this.indicator.bitmap.clear();
this.indicator.bitmap.drawText(`${this.currentHP}/${this.battler.totalHP}`, this._x + 32, this.box.y + 8, 200, 24, "left");
}
// 192 is original the bar width
let width = Math.max(0, (192 * this.currentHP) / this.battler.totalHP);
this.bar.setFrame(0, 0, width, 24);
if (this.currentHP === this.__damage) this.complete();
}
complete() {
$BattleManager.wait(WaitModes.None);
BattleEventQueue.push(() => {
this.animate = false;
}, this);
}
setWidth() {
let width = Math.max(0, (192 * this.currentHP) / this.battler.totalHP);
this.bar.setFrame(0, 0, width, 24);
}
}
}
/* eslint-disable */
import * as grpc from "@grpc/grpc-js";
import {handleClientStreamingCall} from "@grpc/grpc-js/build/src/server-call";
import * as echo_echo_pb from "../echo/echo_pb";
// Service definition for grpc.gateway.testing.EchoService: one method-definition entry per RPC.
interface IEchoServiceService extends grpc.ServiceDefinition<grpc.UntypedServiceImplementation> {
    echo: IEchoServiceService_IEcho;
    echoAbort: IEchoServiceService_IEchoAbort;
    noOp: IEchoServiceService_INoOp;
    serverStreamingEcho: IEchoServiceService_IServerStreamingEcho;
    serverStreamingEchoAbort: IEchoServiceService_IServerStreamingEchoAbort;
    clientStreamingEcho: IEchoServiceService_IClientStreamingEcho;
    fullDuplexEcho: IEchoServiceService_IFullDuplexEcho;
    halfDuplexEcho: IEchoServiceService_IHalfDuplexEcho;
}
// Unary RPC: Echo.
interface IEchoServiceService_IEcho extends grpc.MethodDefinition<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/Echo"
    requestStream: false;
    responseStream: false;
    requestSerialize: grpc.serialize<echo_echo_pb.EchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.EchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.EchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.EchoResponse>;
}
// Unary RPC: EchoAbort.
interface IEchoServiceService_IEchoAbort extends grpc.MethodDefinition<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/EchoAbort"
    requestStream: false;
    responseStream: false;
    requestSerialize: grpc.serialize<echo_echo_pb.EchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.EchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.EchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.EchoResponse>;
}
// Unary RPC: NoOp.
interface IEchoServiceService_INoOp extends grpc.MethodDefinition<echo_echo_pb.Empty, echo_echo_pb.Empty> {
    path: string; // "/grpc.gateway.testing.EchoService/NoOp"
    requestStream: false;
    responseStream: false;
    requestSerialize: grpc.serialize<echo_echo_pb.Empty>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.Empty>;
    responseSerialize: grpc.serialize<echo_echo_pb.Empty>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.Empty>;
}
// Server-streaming RPC: ServerStreamingEcho.
interface IEchoServiceService_IServerStreamingEcho extends grpc.MethodDefinition<echo_echo_pb.ServerStreamingEchoRequest, echo_echo_pb.ServerStreamingEchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/ServerStreamingEcho"
    requestStream: false;
    responseStream: true;
    requestSerialize: grpc.serialize<echo_echo_pb.ServerStreamingEchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.ServerStreamingEchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.ServerStreamingEchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.ServerStreamingEchoResponse>;
}
// Server-streaming RPC: ServerStreamingEchoAbort.
interface IEchoServiceService_IServerStreamingEchoAbort extends grpc.MethodDefinition<echo_echo_pb.ServerStreamingEchoRequest, echo_echo_pb.ServerStreamingEchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/ServerStreamingEchoAbort"
    requestStream: false;
    responseStream: true;
    requestSerialize: grpc.serialize<echo_echo_pb.ServerStreamingEchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.ServerStreamingEchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.ServerStreamingEchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.ServerStreamingEchoResponse>;
}
// Client-streaming RPC: ClientStreamingEcho.
interface IEchoServiceService_IClientStreamingEcho extends grpc.MethodDefinition<echo_echo_pb.ClientStreamingEchoRequest, echo_echo_pb.ClientStreamingEchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/ClientStreamingEcho"
    requestStream: true;
    responseStream: false;
    requestSerialize: grpc.serialize<echo_echo_pb.ClientStreamingEchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.ClientStreamingEchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.ClientStreamingEchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.ClientStreamingEchoResponse>;
}
// Bidirectional-streaming RPC: FullDuplexEcho.
interface IEchoServiceService_IFullDuplexEcho extends grpc.MethodDefinition<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/FullDuplexEcho"
    requestStream: true;
    responseStream: true;
    requestSerialize: grpc.serialize<echo_echo_pb.EchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.EchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.EchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.EchoResponse>;
}
// Bidirectional-streaming RPC: HalfDuplexEcho.
interface IEchoServiceService_IHalfDuplexEcho extends grpc.MethodDefinition<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse> {
    path: string; // "/grpc.gateway.testing.EchoService/HalfDuplexEcho"
    requestStream: true;
    responseStream: true;
    requestSerialize: grpc.serialize<echo_echo_pb.EchoRequest>;
    requestDeserialize: grpc.deserialize<echo_echo_pb.EchoRequest>;
    responseSerialize: grpc.serialize<echo_echo_pb.EchoResponse>;
    responseDeserialize: grpc.deserialize<echo_echo_pb.EchoResponse>;
}
// Runtime service descriptor, used with server.addService(EchoServiceService, impl).
export const EchoServiceService: IEchoServiceService;
// Handler signatures a server implementation must provide, one per RPC.
export interface IEchoServiceServer {
    echo: grpc.handleUnaryCall<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    echoAbort: grpc.handleUnaryCall<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    noOp: grpc.handleUnaryCall<echo_echo_pb.Empty, echo_echo_pb.Empty>;
    serverStreamingEcho: grpc.handleServerStreamingCall<echo_echo_pb.ServerStreamingEchoRequest, echo_echo_pb.ServerStreamingEchoResponse>;
    serverStreamingEchoAbort: grpc.handleServerStreamingCall<echo_echo_pb.ServerStreamingEchoRequest, echo_echo_pb.ServerStreamingEchoResponse>;
    clientStreamingEcho: handleClientStreamingCall<echo_echo_pb.ClientStreamingEchoRequest, echo_echo_pb.ClientStreamingEchoResponse>;
    fullDuplexEcho: grpc.handleBidiStreamingCall<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    halfDuplexEcho: grpc.handleBidiStreamingCall<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
}
// Client-side call signatures; overloads differ only in optional metadata/call options.
export interface IEchoServiceClient {
    echo(request: echo_echo_pb.EchoRequest, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    echo(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    echo(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    echoAbort(request: echo_echo_pb.EchoRequest, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    echoAbort(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    echoAbort(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    noOp(request: echo_echo_pb.Empty, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.Empty) => void): grpc.ClientUnaryCall;
    noOp(request: echo_echo_pb.Empty, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.Empty) => void): grpc.ClientUnaryCall;
    noOp(request: echo_echo_pb.Empty, metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.Empty) => void): grpc.ClientUnaryCall;
    serverStreamingEcho(request: echo_echo_pb.ServerStreamingEchoRequest, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    serverStreamingEcho(request: echo_echo_pb.ServerStreamingEchoRequest, metadata?: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    serverStreamingEchoAbort(request: echo_echo_pb.ServerStreamingEchoRequest, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    serverStreamingEchoAbort(request: echo_echo_pb.ServerStreamingEchoRequest, metadata?: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    clientStreamingEcho(callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    clientStreamingEcho(metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    clientStreamingEcho(options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    clientStreamingEcho(metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    fullDuplexEcho(): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    fullDuplexEcho(options: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    fullDuplexEcho(metadata: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    halfDuplexEcho(): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    halfDuplexEcho(options: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    halfDuplexEcho(metadata: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
}
// Concrete generated client for EchoService (declaration only; implementation is generated JS).
export class EchoServiceClient extends grpc.Client implements IEchoServiceClient {
    constructor(address: string, credentials: grpc.ChannelCredentials, options?: Partial<grpc.ClientOptions>);
    public echo(request: echo_echo_pb.EchoRequest, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    public echo(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    public echo(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    public echoAbort(request: echo_echo_pb.EchoRequest, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    public echoAbort(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    public echoAbort(request: echo_echo_pb.EchoRequest, metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.EchoResponse) => void): grpc.ClientUnaryCall;
    public noOp(request: echo_echo_pb.Empty, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.Empty) => void): grpc.ClientUnaryCall;
    public noOp(request: echo_echo_pb.Empty, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.Empty) => void): grpc.ClientUnaryCall;
    public noOp(request: echo_echo_pb.Empty, metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.Empty) => void): grpc.ClientUnaryCall;
    public serverStreamingEcho(request: echo_echo_pb.ServerStreamingEchoRequest, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    public serverStreamingEcho(request: echo_echo_pb.ServerStreamingEchoRequest, metadata?: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    public serverStreamingEchoAbort(request: echo_echo_pb.ServerStreamingEchoRequest, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    public serverStreamingEchoAbort(request: echo_echo_pb.ServerStreamingEchoRequest, metadata?: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientReadableStream<echo_echo_pb.ServerStreamingEchoResponse>;
    public clientStreamingEcho(callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    public clientStreamingEcho(metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    public clientStreamingEcho(options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    public clientStreamingEcho(metadata: grpc.Metadata, options: Partial<grpc.CallOptions>, callback: (error: grpc.ServiceError | null, response: echo_echo_pb.ClientStreamingEchoResponse) => void): grpc.ClientWritableStream<echo_echo_pb.ClientStreamingEchoRequest>;
    public fullDuplexEcho(options?: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    public fullDuplexEcho(metadata?: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    public halfDuplexEcho(options?: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
    public halfDuplexEcho(metadata?: grpc.Metadata, options?: Partial<grpc.CallOptions>): grpc.ClientDuplexStream<echo_echo_pb.EchoRequest, echo_echo_pb.EchoResponse>;
}
import { gunzip } from 'zlib';
import { Configuration, metricScope, Unit } from 'aws-embedded-metrics';
import type { Context } from 'aws-lambda';
import { AWSError, S3 } from 'aws-sdk';
import { SemVer } from 'semver';
import { extract } from 'tar-stream';
import { CatalogModel, PackageInfo } from '.';
import { CacheStrategy } from '../../caching';
import { DenyListClient } from '../deny-list/client.lambda-shared';
import type { CatalogBuilderInput } from '../payload-schema';
import * as aws from '../shared/aws.lambda-shared';
import * as constants from '../shared/constants';
import { requireEnv } from '../shared/env.lambda-shared';
import { MetricName, METRICS_NAMESPACE } from './constants';
// Publish all embedded CloudWatch metrics from this function under the catalog-builder namespace.
Configuration.namespace = METRICS_NAMESPACE;
/**
* Regenerates the `catalog.json` object in the configured S3 bucket.
*
* @param event configuration for the rebuild job. In particular, the `rebuild`
* property can be set to `true` in order to trigger a full (i.e:
* non-incremental) rebuild of the object.
* @param context the lambda context in which this execution runs.
*
* @returns the information about the updated S3 object.
*/
export async function handler(event: CatalogBuilderInput, context: Context) {
  console.log(JSON.stringify(event, null, 2));
  const BUCKET_NAME = requireEnv('BUCKET_NAME');
  // package name -> (major version -> latest PackageInfo for that major line).
  const packages = new Map<string, Map<number, PackageInfo>>();
  const denyList = await DenyListClient.newClient();
  console.log('Loading existing catalog (if present)...');
  // A missing catalog object is expected on first run; any other S3 error is fatal.
  const data = await aws.s3().getObject({ Bucket: BUCKET_NAME, Key: constants.CATALOG_KEY }).promise()
    .catch((err: AWSError) => err.code !== 'NoSuchKey'
      ? Promise.reject(err)
      : Promise.resolve({ /* no data */ } as S3.GetObjectOutput));
  if (data.Body) {
    console.log('Catalog found. Loading...');
    const catalog: CatalogModel = JSON.parse(data.Body.toString('utf-8'));
    for (const info of catalog.packages) {
      // Deny-listed entries are dropped on every rebuild so they age out of the catalog.
      const denyRule = denyList.lookup(info.name, info.version);
      if (denyRule != null) {
        console.log(`Dropping ${info.name}@${info.version} from catalog: ${denyRule.reason}`);
        continue;
      }
      if (!packages.has(info.name)) {
        packages.set(info.name, new Map());
      }
      packages.get(info.name)!.set(info.major, info);
    }
  }
  // If defined, the function will invoke itself again to resume the work from that key (this
  // happens only in "from scratch" or "rebuild" cases).
  let nextStartAfter: string | undefined;
  if (event.package) {
    if (!event.package.key.endsWith(constants.PACKAGE_KEY_SUFFIX)) {
      throw new Error(`The provided package key is invalid: ${event.package.key} does not end in ${constants.PACKAGE_KEY_SUFFIX}`);
    }
    console.log('Registering new packages...');
    // note that we intentionally don't catch errors here to let these
    // event go to the DLQ for manual inspection.
    await appendPackage(packages, event.package.key, BUCKET_NAME, denyList);
  }
  // If we don't have a package in event, then we're refreshing the catalog. This is also true if we
  // don't have a catalog body (from scratch) or if "startAfter" is set (continuation of from
  // scratch).
  if (!event.package || !data.Body || event.startAfter) {
    console.log('Recreating or refreshing catalog...');
    // Keyed by S3 object key; values are whatever was thrown while processing that key.
    const failures: Record<string, unknown> = {};
    for await (const { Key: pkgKey } of relevantObjects(BUCKET_NAME, event.startAfter)) {
      try {
        await appendPackage(packages, pkgKey!, BUCKET_NAME, denyList);
      } catch (e) {
        failures[pkgKey!] = e;
      }
      // If we're getting short on time (1 minute out of 15 left), we'll be continuing in a new
      // invocation after writing what we've done so far to S3...
      if (context.getRemainingTimeInMillis() <= 60_000) {
        nextStartAfter = pkgKey;
        break;
      }
    }
    for (const [key, error] of Object.entries(failures)) {
      console.log(`Failed processing ${key}: ${error}`);
    }
    await metricScope((metrics) => async () => {
      metrics.setDimensions();
      const failedCount = Object.keys(failures).length;
      console.log(`Marking ${failedCount} failed packages`);
      metrics.putMetric(MetricName.FAILED_PACKAGES_ON_RECREATION, failedCount, Unit.Count);
    })();
  }
  // Build the final data package...
  console.log('Consolidating catalog...');
  const catalog: CatalogModel = { packages: new Array<PackageInfo>(), updated: new Date().toISOString() };
  for (const majors of packages.values()) {
    for (const pkg of majors.values()) {
      catalog.packages.push(pkg);
    }
  }
  console.log(`There are now ${catalog.packages.length} registered package major versions`);
  await metricScope((metrics) => async () => {
    metrics.setDimensions();
    metrics.putMetric(MetricName.REGISTERED_PACKAGES_MAJOR_VERSION, catalog.packages.length, Unit.Count);
    metrics.putMetric(
      MetricName.MISSING_CONSTRUCT_FRAMEWORK_COUNT,
      catalog.packages.filter((pkg) => pkg.constructFramework == null).length,
      Unit.Count,
    );
    metrics.putMetric(
      MetricName.MISSING_CONSTRUCT_FRAMEWORK_VERSION_COUNT,
      catalog.packages.filter(
        (pkg) => pkg.constructFramework && pkg.constructFramework.majorVersion == null,
      ).length,
      Unit.Count,
    );
  })();
  // Clean up existing entries if necessary. In particular, remove the license texts as they make
  // the catalog unnecessarily large, and may hinder some search queries' result quality.
  for (const entry of catalog.packages) {
    if (entry.metadata) {
      delete (entry.metadata as any).licenseText;
    }
  }
  // Upload the result to S3 and exit.
  const result = await aws.s3().putObject({
    Bucket: BUCKET_NAME,
    Key: constants.CATALOG_KEY,
    Body: JSON.stringify(catalog, null, 2),
    ContentType: 'application/json',
    CacheControl: CacheStrategy.default().toString(),
    Metadata: {
      'Lambda-Log-Group': context.logGroupName,
      'Lambda-Log-Stream': context.logStreamName,
      'Lambda-Run-Id': context.awsRequestId,
      'Package-Count': `${catalog.packages.length}`,
    },
  }).promise();
  if (nextStartAfter != null) {
    console.log(`Will continue from ${nextStartAfter} in new invocation...`);
    const nextEvent: CatalogBuilderInput = { ...event, startAfter: nextStartAfter };
    // We start it asynchronously, as this function has a provisionned
    // concurrency of 1 (so a synchronous attempt would always be throttled).
    await aws.lambda().invokeAsync({
      FunctionName: context.functionName,
      InvokeArgs: JSON.stringify(nextEvent, null, 2),
    }).promise();
  }
  return result;
}
/**
 * A generator that asynchronously traverses the set of "interesting" objects
 * found by listing the configured S3 bucket. Those objects correspond to all
 * npm package tarballs present under the `packages/` prefix in the bucket.
 *
 * Only tarballs that have documentation in at least one target language are
 * yielded; all others are silently skipped.
 *
 * @param bucket the bucket in which to list objects
 * @param startAfter the key to start reading from, if provided.
 */
async function* relevantObjects(bucket: string, startAfter?: string) {
  const request: S3.ListObjectsV2Request = {
    Bucket: bucket,
    Prefix: constants.STORAGE_KEY_PREFIX,
    StartAfter: startAfter,
  };
  do {
    const result = await aws.s3().listObjectsV2(request).promise();
    for (const object of result.Contents ?? []) {
      if (!object.Key?.endsWith(constants.PACKAGE_KEY_SUFFIX)) {
        continue;
      }
      // Hoist the shared key prefix instead of recomputing it once per language.
      const baseKey = object.Key.substring(0, object.Key.length - constants.PACKAGE_KEY_SUFFIX.length);
      // We only register packages if they have AT LEAST docs in one language.
      // The existence checks are deliberately sequential so that the first hit
      // short-circuits and no further HEAD requests are issued for this key.
      const docKeys = [
        `${baseKey}${constants.DOCS_KEY_SUFFIX_TYPESCRIPT}`,
        `${baseKey}${constants.DOCS_KEY_SUFFIX_PYTHON}`,
        `${baseKey}${constants.DOCS_KEY_SUFFIX_JAVA}`,
        `${baseKey}${constants.DOCS_KEY_SUFFIX_CSHARP}`,
      ];
      let hasDocs = false;
      for (const docKey of docKeys) {
        if (await aws.s3ObjectExists(bucket, docKey)) {
          hasDocs = true;
          break;
        }
      }
      if (!hasDocs) {
        continue;
      }
      yield object;
    }
    // Continue paginating until S3 stops returning a continuation token.
    request.ContinuationToken = result.NextContinuationToken;
  } while (request.ContinuationToken != null);
}
/**
 * Downloads the tarball at `pkgKey`, extracts its `package/package.json`, and
 * records the package into the `packages` working set, keyed by package name
 * and major version (keeping the newest version per major line).
 *
 * @param packages   working set mutated in place; shaped like
 *                   Map<packageName, Map<major, PackageInfo>> (typed `any` here)
 * @param pkgKey     S3 key of the npm package tarball to process
 * @param bucketName bucket containing the tarball and its metadata object
 * @param denyList   client used to skip packages blocked by a deny list rule
 */
async function appendPackage(packages: any, pkgKey: string, bucketName: string, denyList: DenyListClient) {
  console.log(`Processing key: ${pkgKey}`);
  // The `!` assumes keys yielded upstream always match the storage key format.
  const [, packageName, versionStr] = constants.STORAGE_KEY_FORMAT_REGEX.exec(pkgKey)!;
  const version = new SemVer(versionStr);
  const found = packages.get(packageName)?.get(version.major);
  // If the version is === to the current latest, we'll be replacing that (so re-generated metadata are taken into account)
  if (found != null && version.compare(found.version) < 0) {
    console.log(`Skipping ${packageName}@${version} because it is not newer than the existing ${found.version}`);
    return;
  }
  console.log(`Checking if ${packageName}@${version.version} matches a deny list rule`);
  const blocked = denyList.lookup(packageName, version.version);
  if (blocked) {
    console.log(`Skipping ${packageName}@${version.version} because it is blocked by the deny list rule: ${JSON.stringify(blocked)}`);
    return;
  }
  console.log(`Registering ${packageName}@${version}`);
  // Download the tarball to inspect the `package.json` data therein.
  const pkg = await aws.s3().getObject({ Bucket: bucketName, Key: pkgKey }).promise();
  const metadataKey = pkgKey.replace(constants.PACKAGE_KEY_SUFFIX, constants.METADATA_KEY_SUFFIX);
  const metadataResponse = await aws.s3().getObject({ Bucket: bucketName, Key: metadataKey }).promise();
  // Gunzip the tarball in memory, then stream-extract only package/package.json.
  const manifest = await new Promise<Buffer>((ok, ko) => {
    gunzip(Buffer.from(pkg.Body! as any), (err, tar) => {
      if (err) {
        return ko(err);
      }
      extract()
        .on('entry', (header, stream, next) => {
          if (header.name !== 'package/package.json') {
            // Not the file we are looking for, skip ahead (next run-loop tick).
            return setImmediate(next);
          }
          const chunks = new Array<Buffer>();
          return stream
            .on('data', (chunk) => chunks.push(Buffer.from(chunk)))
            .once('end', () => {
              ok(Buffer.concat(chunks));
              next();
            })
            .resume();
        })
        .once('finish', () => {
          // Only effective when `ok` was never called above — a settled
          // promise ignores this later rejection.
          ko(new Error('Could not find package/package.json in tarball!'));
        })
        .write(tar, (writeErr) => {
          if (writeErr) {
            ko(writeErr);
          }
        });
    });
  });
  // Add the PackageInfo into the working set
  const pkgMetadata = JSON.parse(manifest.toString('utf-8'));
  const npmMetadata = JSON.parse(metadataResponse?.Body?.toString('utf-8') ?? '{}');
  const major = new SemVer(pkgMetadata.version).major;
  if (!packages.has(pkgMetadata.name)) {
    packages.set(pkgMetadata.name, new Map());
  }
  packages.get(pkgMetadata.name)!.set(major, {
    author: pkgMetadata.author,
    description: pkgMetadata.description,
    keywords: pkgMetadata.keywords,
    // NOTE(review): assumes `pkgMetadata.jsii` is always present — a non-jsii
    // tarball would throw here. TODO confirm upstream filtering guarantees it.
    languages: pkgMetadata.jsii.targets,
    license: pkgMetadata.license,
    major,
    metadata: npmMetadata,
    name: pkgMetadata.name,
    version: pkgMetadata.version,
  });
}
import { CustomFeaturesTile } from './customFeaturesTile';
// @ts-ignore
import concat from 'concat-stream';
import path from 'path';
/**
* Draws a tile indicating the number of features that exist within the tile,
* visible when zoomed in closer. The number is drawn in the center of the tile
* and by default is surrounded by a colored circle with border. By default a
* tile border is drawn and the tile is colored (transparently most likely). The
* paint objects for each draw type can be modified to or set to null (except
* for the text paint object).
*/
export class NumberFeaturesTile extends CustomFeaturesTile {
textSize: number;
textFont: string;
textColor: string;
circleStrokeWidth: number;
circleBorderColor: string;
circleFillColor: string;
circlePaddingPercentage: number;
defaultFontRegistered: boolean;
constructor() {
super();
this.textSize = 18;
this.textFont = 'PT Mono';
this.textColor = 'rgba(255, 255, 255, 1.0)';
this.circleStrokeWidth = 3;
this.circleBorderColor = 'rgba(0, 0, 0, 0.25)';
this.circleFillColor = 'rgba(0, 0, 0, 1.0)';
this.circlePaddingPercentage = 0.25;
this.defaultFontRegistered = false;
}
/**
* register a font
* @private
*/
_registerDefaultFont(): void {
if (!this.defaultFontRegistered) {
if (CustomFeaturesTile.useNodeCanvas) {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const Canvas = require('canvas');
Canvas.registerFont(path.join(__dirname, '..', '..', '..', 'fonts', 'PTMono-Regular.ttf'), {
family: 'PT Mono',
});
}
this.defaultFontRegistered = true;
}
}
/**
* Get the text size
* @return {Number} text size
*/
getTextSize(): number {
return this.textSize;
}
/**
* Set the text size
* @param {Number} textSize text size
*/
setTextSize(textSize: number): void {
this.textSize = textSize;
}
/**
* Get the text color
* @return {String} text color
*/
getTextColor(): string {
return this.textColor;
}
/**
* Set the text color
* @param {String} textColor text color
*/
setTextColor(textColor: string): void {
this.textColor = textColor;
}
/**
* Get the circle stroke width
* @return {Number} circle stroke width
*/
getCircleStrokeWidth(): number {
return this.circleStrokeWidth;
}
/**
* Set the circle stroke width
* @param {Number} circleStrokeWidth circle stroke width
*/
setCircleStrokeWidth(circleStrokeWidth: number): void {
this.circleStrokeWidth = circleStrokeWidth;
}
/**
* Get the circle color
* @return {String} circle color
*/
getCircleColor(): string {
return this.circleBorderColor;
}
/**
* Set the circle color
* @param {String} circleBorderColor circle color
*/
setCircleColor(circleBorderColor: string): void {
this.circleBorderColor = circleBorderColor;
}
/**
* Get the circle fill color
* @return {String} circle fill color
*/
getCircleFillColor(): string {
return this.circleFillColor;
}
/**
* Set the circle fill color
* @param {String} circleFillColor circle fill color
*/
setCircleFillColor(circleFillColor: string): void {
this.circleFillColor = circleFillColor;
}
/**
* Get the circle padding percentage around the text
* @return {Number} circle padding percentage, 0.0 to 1.0
*/
getCirclePaddingPercentage(): number {
return this.circlePaddingPercentage;
}
/**
* Set the circle padding percentage to pad around the text, value between
* 0.0 and 1.0
* @param {Number} circlePaddingPercentage circle padding percentage
*/
setCirclePaddingPercentage(circlePaddingPercentage: number): void {
if (circlePaddingPercentage < 0.0 || circlePaddingPercentage > 1.0) {
throw new Error('Circle padding percentage must be between 0.0 and 1.0: ' + circlePaddingPercentage);
}
this.circlePaddingPercentage = circlePaddingPercentage;
}
/**
* Get the tile border stroke width
* @return {Number} tile border stroke width
*/
getTileBorderStrokeWidth(): number {
return this.tileBorderStrokeWidth;
}
/**
* Set the tile border stroke width
*
* @param {Number} tileBorderStrokeWidth tile border stroke width
*/
setTileBorderStrokeWidth(tileBorderStrokeWidth: number): void {
this.tileBorderStrokeWidth = tileBorderStrokeWidth;
}
/**
* Get the tile border color
* @return {String} tile border color
*/
getTileBorderColor(): string {
return this.tileBorderColor;
}
/**
* Set the tile border color
* @param {String} tileBorderColor tile border color
*/
setTileBorderColor(tileBorderColor: string): void {
this.tileBorderColor = tileBorderColor;
}
/**
* Get the tile fill color
* @return {String} tile fill color
*/
getTileFillColor(): string {
return this.tileFillColor;
}
/**
* Set the tile fill color
* @param {String} tileFillColor tile fill color
*/
setTileFillColor(tileFillColor: string): void {
this.tileFillColor = tileFillColor;
}
/**
* Is the draw unindexed tiles option enabled
* @return {Boolean} true if drawing unindexed tiles
*/
isDrawUnindexedTiles(): boolean {
return this.drawUnindexedTiles;
}
/**
* Set the draw unindexed tiles option
* @param {Boolean} drawUnindexedTiles draw unindexed tiles flag
*/
setDrawUnindexedTiles(drawUnindexedTiles: boolean): void {
this.drawUnindexedTiles = drawUnindexedTiles;
}
/**
* Get the compression format
* @return {String} the compression format (either png or jpeg)
*/
getCompressFormat(): string {
return this.compressFormat;
}
/**
* Set the compression format
* @param {String} compressFormat either 'png' or 'jpeg'
*/
setCompressFormat(compressFormat: string): void {
this.compressFormat = compressFormat;
}
/**
* Draw unindexed tile
* @param tileWidth
* @param tileHeight
* @param canvas
* @returns {Promise<String|Buffer>}
*/
async drawUnindexedTile(
tileWidth: number,
tileHeight: number,
canvas: any = null,
): Promise<string | Buffer | Uint8Array> {
let image = null;
if (this.drawUnindexedTiles) {
// Draw a tile indicating we have no idea if there are features
// inside.
// The table is not indexed and more features exist than the max
// feature count set.
image = this.drawTile(tileWidth, tileHeight, '?', canvas);
}
return image;
}
/**
* Draw a tile with the provided text label in the middle
* @param {Number} tileWidth
* @param {Number} tileHeight
* @param {String} text
* @param tileCanvas
* @return {Promise<String|Buffer>}
*/
async drawTile(
tileWidth: number,
tileHeight: number,
text: string,
tileCanvas: null,
): Promise<string | Buffer | Uint8Array> {
// eslint-disable-next-line complexity
return new Promise(resolve => {
this._registerDefaultFont();
let canvas;
if (tileCanvas !== undefined && tileCanvas !== null) {
canvas = tileCanvas;
} else {
if (CustomFeaturesTile.useNodeCanvas) {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const Canvas = require('canvas');
canvas = Canvas.createCanvas(tileWidth, tileHeight);
} else {
canvas = document.createElement('canvas');
canvas.width = tileWidth;
canvas.height = tileHeight;
}
}
const context = canvas.getContext('2d');
context.clearRect(0, 0, tileWidth, tileHeight);
// Draw the tile border
if (this.tileFillColor !== null) {
context.fillStyle = this.tileFillColor;
context.fillRect(0, 0, tileWidth, tileHeight);
}
// Draw the tile border
if (this.tileBorderColor !== null) {
context.strokeStyle = this.tileBorderColor;
context.lineWidth = this.tileBorderStrokeWidth;
context.strokeRect(0, 0, tileWidth, tileHeight);
}
context.font = this.textSize + "px '" + this.textFont + "'";
const textSize = context.measureText(text);
const textWidth = textSize.width;
const textHeight = this.textSize;
// Determine the center of the tile
const centerX = Math.round(tileWidth / 2.0);
const centerY = Math.round(tileHeight / 2.0);
// Draw the circle
if (this.circleBorderColor != null || this.circleFillColor != null) {
const diameter = Math.max(textWidth, textHeight);
let radius = Math.round(diameter / 2.0);
radius = Math.round(radius + diameter * this.circlePaddingPercentage);
// Draw the circle
if (this.circleFillColor != null) {
context.fillStyle = this.circleFillColor;
context.beginPath();
context.arc(centerX, centerY, radius, 0, 2 * Math.PI, true);
context.closePath();
context.fill();
}
// Draw the circle border
if (this.circleBorderColor != null) {
context.strokeStyle = this.circleBorderColor;
context.lineWidth = this.circleStrokeWidth;
context.beginPath();
context.arc(centerX, centerY, radius, 0, 2 * Math.PI, true);
context.closePath();
context.stroke();
}
}
// Draw the text
const textX = centerX - Math.round(textWidth / 2.0);
const textY = centerY;
context.fillStyle = this.textColor;
context.textBaseline = 'middle';
context.fillText(text, textX, textY);
if (CustomFeaturesTile.useNodeCanvas) {
const writeStream = concat(function(buffer: Buffer | Uint8Array) {
resolve(buffer);
});
let stream = null;
if (this.compressFormat === 'png') {
stream = canvas.createPNGStream();
} else {
stream = canvas.createJPEGStream();
}
stream.pipe(writeStream);
} else {
resolve(canvas.toDataURL('image/' + this.compressFormat));
}
});
}
} | the_stack |
import { Component, DebugElement } from '@angular/core';
import { ComponentFixture, fakeAsync, flush, TestBed } from '@angular/core/testing';
import { FormControl, FormsModule, ReactiveFormsModule } from '@angular/forms';
import { By } from '@angular/platform-browser';
import { fastTestSetup } from '../../../../test/helpers';
import { CheckboxChange, CheckboxComponent } from './checkbox.component';
import { CheckboxModule } from './checkbox.module';
// Unit tests for CheckboxComponent: checked/indeterminate/disabled state,
// reactive-forms integration, click/change event behavior, and a11y attributes.
describe('browser.ui.checkbox', () => {
  let fixture: ComponentFixture<TestCheckboxComponent>;
  fastTestSetup();
  beforeAll(async () => {
    await TestBed
      .configureTestingModule({
        imports: [
          FormsModule,
          ReactiveFormsModule,
          CheckboxModule,
        ],
        declarations: [
          TestCheckboxComponent,
        ],
      })
      .compileComponents();
  });
  beforeEach(() => {
    fixture = TestBed.createComponent(TestCheckboxComponent);
    fixture.detectChanges();
  });
  describe('basic behavior', () => {
    let checkboxDebugElement: DebugElement;
    let checkboxNativeElement: HTMLElement;
    let checkboxInstance: CheckboxComponent;
    let testComponent: TestCheckboxComponent;
    let inputElement: HTMLInputElement;
    let labelElement: HTMLLabelElement;
    beforeEach(() => {
      // Re-query fresh references to the component and its DOM for every test.
      checkboxDebugElement = fixture.debugElement.query(By.directive(CheckboxComponent));
      checkboxNativeElement = checkboxDebugElement.nativeElement;
      checkboxInstance = checkboxDebugElement.componentInstance;
      testComponent = fixture.debugElement.componentInstance;
      inputElement = <HTMLInputElement>checkboxNativeElement.querySelector('input');
      labelElement = <HTMLLabelElement>checkboxNativeElement.querySelector('label');
    });
    it('should add and remove the checked state', () => {
      expect(checkboxInstance.checked).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--checked');
      expect(inputElement.checked).toBe(false);
      testComponent.control.patchValue(true);
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(true);
      expect(checkboxNativeElement.classList).toContain('Checkbox--checked');
      expect(inputElement.checked).toBe(true);
      testComponent.control.patchValue(false);
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--checked');
      expect(inputElement.checked).toBe(false);
    });
    it('should add and remove indeterminate state', () => {
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--checked');
      expect(inputElement.checked).toBe(false);
      expect(inputElement.indeterminate).toBe(false);
      expect(inputElement.getAttribute('aria-checked'))
        .toBe('false', 'Expect aria-checked to be false');
      testComponent.isIndeterminate = true;
      fixture.detectChanges();
      expect(checkboxNativeElement.classList).toContain('Checkbox--indeterminate');
      expect(inputElement.checked).toBe(false);
      expect(inputElement.indeterminate).toBe(true);
      expect(inputElement.getAttribute('aria-checked'))
        .toBe('mixed', 'Expect aria checked to be mixed for indeterminate checkbox');
      testComponent.isIndeterminate = false;
      fixture.detectChanges();
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--indeterminate');
      expect(inputElement.checked).toBe(false);
      expect(inputElement.indeterminate).toBe(false);
    });
    it('should set indeterminate to false when input clicked', fakeAsync(() => {
      testComponent.isIndeterminate = true;
      fixture.detectChanges();
      expect(checkboxInstance.indeterminate).toBe(true);
      expect(inputElement.indeterminate).toBe(true);
      expect(testComponent.isIndeterminate).toBe(true);
      inputElement.click();
      fixture.detectChanges();
      // Flush the microtasks because the forms module updates the model state asynchronously.
      flush();
      // The checked property has been updated from the model and now the view needs
      // to reflect the state change.
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(true);
      expect(inputElement.indeterminate).toBe(false);
      expect(inputElement.checked).toBe(true);
      expect(testComponent.isIndeterminate).toBe(false);
      testComponent.isIndeterminate = true;
      fixture.detectChanges();
      expect(checkboxInstance.indeterminate).toBe(true);
      expect(inputElement.indeterminate).toBe(true);
      expect(inputElement.checked).toBe(true);
      expect(testComponent.isIndeterminate).toBe(true);
      expect(inputElement.getAttribute('aria-checked'))
        .toBe('true', 'Expect aria checked to be true');
      inputElement.click();
      fixture.detectChanges();
      // Flush the microtasks because the forms module updates the model state asynchronously.
      flush();
      // The checked property has been updated from the model and now the view needs
      // to reflect the state change.
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(false);
      expect(inputElement.indeterminate).toBe(false);
      expect(inputElement.checked).toBe(false);
      expect(testComponent.isIndeterminate).toBe(false);
    }));
    it('should not set indeterminate to false when checked is set programmatically', () => {
      testComponent.isIndeterminate = true;
      fixture.detectChanges();
      expect(checkboxInstance.indeterminate).toBe(true);
      expect(inputElement.indeterminate).toBe(true);
      expect(testComponent.isIndeterminate).toBe(true);
      testComponent.control.patchValue(true);
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(true);
      expect(inputElement.indeterminate).toBe(true);
      expect(inputElement.checked).toBe(true);
      expect(testComponent.isIndeterminate).toBe(true);
      testComponent.control.patchValue(false);
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(false);
      expect(inputElement.indeterminate).toBe(true);
      expect(inputElement.checked).toBe(false);
      expect(testComponent.isIndeterminate).toBe(true);
    });
    it('should change native element checked when check programmatically', () => {
      expect(inputElement.checked).toBe(false);
      checkboxInstance.checked = true;
      fixture.detectChanges();
      expect(inputElement.checked).toBe(true);
    });
    it('should toggle checked state on click', () => {
      expect(checkboxInstance.checked).toBe(false);
      labelElement.click();
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(true);
      labelElement.click();
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(false);
    });
    it('should change from indeterminate to checked on click', fakeAsync(() => {
      testComponent.control.patchValue(false);
      testComponent.isIndeterminate = true;
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(false);
      expect(checkboxInstance.indeterminate).toBe(true);
      checkboxInstance._onInputClick(<Event>{stopPropagation: () => {}});
      // Flush the microtasks because the indeterminate state will be updated in the next tick.
      flush();
      expect(checkboxInstance.checked).toBe(true);
      expect(checkboxInstance.indeterminate).toBe(false);
      checkboxInstance._onInputClick(<Event>{stopPropagation: () => {}});
      fixture.detectChanges();
      expect(checkboxInstance.checked).toBe(false);
      expect(checkboxInstance.indeterminate).toBe(false);
      flush();
    }));
    it('should add and remove disabled state', () => {
      expect(checkboxInstance.disabled).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--disabled');
      expect(inputElement.tabIndex).toBe(0);
      expect(inputElement.disabled).toBe(false);
      testComponent.control.disable();
      fixture.detectChanges();
      expect(checkboxInstance.disabled).toBe(true);
      expect(checkboxNativeElement.classList).toContain('Checkbox--disabled');
      expect(inputElement.disabled).toBe(true);
      testComponent.control.enable();
      fixture.detectChanges();
      expect(checkboxInstance.disabled).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--disabled');
      expect(inputElement.tabIndex).toBe(0);
      expect(inputElement.disabled).toBe(false);
    });
    it('should not toggle `checked` state upon interation while disabled', () => {
      testComponent.control.disable();
      fixture.detectChanges();
      checkboxNativeElement.click();
      expect(checkboxInstance.checked).toBe(false);
    });
    it('should overwrite indeterminate state when clicked', fakeAsync(() => {
      testComponent.isIndeterminate = true;
      fixture.detectChanges();
      inputElement.click();
      fixture.detectChanges();
      // Flush the microtasks because the indeterminate state will be updated in the next tick.
      flush();
      expect(checkboxInstance.checked).toBe(true);
      expect(checkboxInstance.indeterminate).toBe(false);
    }));
    it('should preserve the user-provided id', () => {
      expect(checkboxNativeElement.id).toBe('simple-check');
      expect(inputElement.id).toBe('simple-check-input');
    });
    it('should project the checkbox content into the label element', () => {
      const label = <HTMLLabelElement>checkboxNativeElement.querySelector('.Checkbox__label');
      expect(label.textContent.trim()).toBe('Simple checkbox');
    });
    it('should make the host element a tab stop', () => {
      expect(inputElement.tabIndex).toBe(0);
    });
    it('should not trigger the click event multiple times', () => {
      // By default, when clicking on a label element, a generated click will be dispatched
      // on the associated input element.
      // Since we're using a label element and a visual hidden input, this behavior can lead
      // to an issue, where the click events on the checkbox are getting executed twice.
      spyOn(testComponent, 'onCheckboxClick');
      expect(inputElement.checked).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--checked');
      labelElement.click();
      fixture.detectChanges();
      expect(checkboxNativeElement.classList).toContain('Checkbox--checked');
      expect(inputElement.checked).toBe(true);
      expect(testComponent.onCheckboxClick).toHaveBeenCalledTimes(1);
    });
    it('should trigger a change event when the native input does', fakeAsync(() => {
      spyOn(testComponent, 'onCheckboxChange');
      expect(inputElement.checked).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--checked');
      labelElement.click();
      fixture.detectChanges();
      expect(inputElement.checked).toBe(true);
      expect(checkboxNativeElement.classList).toContain('Checkbox--checked');
      fixture.detectChanges();
      flush();
      // The change event should have fired exactly once, because the value
      // change was caused by a user interaction (the label click above).
      expect(testComponent.onCheckboxChange).toHaveBeenCalledTimes(1);
    }));
    it('should not trigger the change event by changing the native value', fakeAsync(() => {
      spyOn(testComponent, 'onCheckboxChange');
      expect(inputElement.checked).toBe(false);
      expect(checkboxNativeElement.classList).not.toContain('Checkbox--checked');
      testComponent.control.patchValue(true);
      fixture.detectChanges();
      expect(inputElement.checked).toBe(true);
      expect(checkboxNativeElement.classList).toContain('Checkbox--checked');
      fixture.detectChanges();
      flush();
      // The change event shouldn't fire, because the value change was not caused
      // by any interaction.
      expect(testComponent.onCheckboxChange).not.toHaveBeenCalled();
    }));
    it('should focus on underlying input element when focus() is called', () => {
      expect(document.activeElement).not.toBe(inputElement);
      checkboxInstance.focus();
      fixture.detectChanges();
      expect(document.activeElement).toBe(inputElement);
    });
    it('should forward the value to input element', () => {
      testComponent.checkboxValue = 'basic_checkbox';
      fixture.detectChanges();
      expect(inputElement.value).toBe('basic_checkbox');
    });
    it('should remove the SVG checkmark from the tab order', () => {
      expect(checkboxNativeElement.querySelector('svg').getAttribute('focusable')).toBe('false');
    });
  });
});
// Host component used to exercise gd-checkbox with a reactive FormControl,
// two-way indeterminate binding, and click/change listeners.
@Component({
  template: `
    <div (click)="parentElementClicked = true" (keyup)="parentElementKeyedUp = true">
      <gd-checkbox
        [id]="checkboxId"
        [formControl]="control"
        [(indeterminate)]="isIndeterminate"
        [value]="checkboxValue"
        (click)="onCheckboxClick($event)"
        (change)="onCheckboxChange($event)">
        Simple checkbox
      </gd-checkbox>
    </div>
  `,
})
class TestCheckboxComponent {
  // Drives the checked state through the reactive forms API.
  readonly control = new FormControl(false);
  isIndeterminate: boolean = false;
  // Flags flipped by the wrapping div's listeners to detect event bubbling.
  parentElementClicked: boolean = false;
  parentElementKeyedUp: boolean = false;
  checkboxId: string | null = 'simple-check';
  checkboxValue: string = 'single_checkbox';
  /* tslint:disable */
  // Spied on in tests; intentionally empty bodies.
  onCheckboxClick: (event?: Event) => void = () => {
  };
  onCheckboxChange: (event?: CheckboxChange) => void = () => {
  };
  /* tslint:enable */
}
import { getTestConfiguration } from '../../test-utils/config';
import {
getTestRequestRepository,
getTestNetworkService,
} from '../../test-utils/infrastructure';
import { RequestRepository } from '../repository';
import { NetworkService } from '../service';
import { Response, Request } from '../entity';
import { wait } from '../../utils/timers';
import { RespondToRequest } from './RespondToRequest';
// Replace the real timer helpers with jest mocks so the response-time
// simulation (`wait`) can be asserted without actually sleeping.
jest.mock('../../utils/timers');
let requestRepository: RequestRepository;
let networkService: NetworkService;
beforeEach(() => {
  // Fresh mocks for every test to avoid call-count leakage between cases.
  (wait as jest.Mock).mockReset();
  requestRepository = getTestRequestRepository();
  networkService = getTestNetworkService();
});
// Cache-hit path: the repository returns a stored Response, so the use case
// must answer from the cache and never touch the network.
describe('when the response is in the cache', () => {
  beforeEach(() => {
    (requestRepository.getResponseByRequestId as jest.Mock).mockResolvedValue(
      new Response(
        200,
        { 'cache-control': 'something' },
        Buffer.from('some body'),
        66
      )
    );
  });
  it('should not simulate the response time when useRealResponseTime is set to false', async () => {
    // Given
    const useCase = new RespondToRequest({
      requestRepository,
      networkService,
      config: getTestConfiguration({ useRealResponseTime: false }),
    });
    const method = 'GET';
    const url = '/beers/1';
    const headers = { authorization: 'Bearer token' };
    const body = 'beer information';
    // When
    await useCase.execute(method, url, headers, body);
    //Then
    expect(wait).not.toHaveBeenCalled();
  });
  it('should return the response from the cache, without using the network', async () => {
    // Given
    const useCase = new RespondToRequest({
      requestRepository,
      networkService,
      config: getTestConfiguration({ useRealResponseTime: true }),
    });
    const method = 'GET';
    const url = '/beers/1';
    const headers = { authorization: 'Bearer token' };
    const body = 'beer information';
    // When
    const response = await useCase.execute(method, url, headers, body);
    //Then
    expect(response).toEqual(
      new Response(
        200,
        { 'cache-control': 'something' },
        Buffer.from('some body'),
        66
      )
    );
    expect(requestRepository.getResponseByRequestId).toHaveBeenCalledTimes(1);
    expect(requestRepository.getResponseByRequestId).toHaveBeenCalledWith(
      new Request(method, url, headers, body).id
    );
    // 66 is the stored response time; it must be replayed via wait().
    expect(wait).toHaveBeenCalledTimes(1);
    expect(wait).toHaveBeenCalledWith(66);
    expect(networkService.executeRequest).not.toHaveBeenCalled();
  });
});
// Cache-miss path: the repository has nothing, so the use case must go to the
// network, replay the response time, and persist the response for next time.
describe('when no response is in the cache', () => {
  beforeEach(() => {
    (requestRepository.getResponseByRequestId as jest.Mock).mockResolvedValue(
      null
    );
    (networkService.executeRequest as jest.Mock).mockResolvedValue(
      new Response(
        200,
        { 'cache-control': 'something' },
        Buffer.from('some body'),
        66
      )
    );
  });
  it('should fetch the reponse from the network and store it in the cache', async () => {
    // The disableCachingPatterns entry below does NOT match (POST vs GET),
    // so caching must still happen for this request.
    const useCase = new RespondToRequest({
      requestRepository,
      networkService,
      config: getTestConfiguration({
        useRealResponseTime: true,
        disableCachingPatterns: [
          {
            method: 'POST',
            urlPattern: '/beers/1',
          },
        ],
      }),
    });
    const method = 'GET';
    const url = '/beers/1';
    const headers = { authorization: 'Bearer token' };
    const body = 'beer information';
    // When
    const response = await useCase.execute(method, url, headers, body);
    //Then
    expect(response).toEqual(
      new Response(
        200,
        { 'cache-control': 'something' },
        Buffer.from('some body'),
        66
      )
    );
    expect(networkService.executeRequest).toHaveBeenCalledTimes(1);
    expect(networkService.executeRequest).toHaveBeenCalledWith(
      new Request(method, url, headers, body)
    );
    expect(wait).toHaveBeenCalledTimes(1);
    expect(wait).toHaveBeenCalledWith(66);
    expect(requestRepository.persistResponseForRequest).toHaveBeenCalledTimes(
      1
    );
    expect(requestRepository.persistResponseForRequest).toHaveBeenCalledWith(
      new Request(method, url, headers, body),
      new Response(
        200,
        { 'cache-control': 'something' },
        Buffer.from('some body'),
        66
      )
    );
  });
});
describe('when caching is disabled for the method and url', () => {
beforeEach(() => {
(networkService.executeRequest as jest.Mock).mockResolvedValue(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
it('should not cache the response (1 matching pattern)', async () => {
// Given
const useCase = new RespondToRequest({
networkService,
requestRepository,
config: getTestConfiguration({
disableCachingPatterns: [
{
method: 'post',
urlPattern: '/pokemon/ditto',
},
],
}),
});
const method = 'POST';
const url = '/pokemon/ditto';
// When
const response = await useCase.execute(method, url, {}, '');
//Then
expect(requestRepository.persistResponseForRequest).not.toHaveBeenCalled();
expect(requestRepository.getResponseByRequestId).not.toHaveBeenCalled();
expect(response).toEqual(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
it('should not cache the response (3 patterns / 1 matching pattern)', async () => {
// Given
const useCase = new RespondToRequest({
networkService,
requestRepository,
config: getTestConfiguration({
disableCachingPatterns: [
{
method: 'POST',
urlPattern: '/pokemon/ditto',
},
{
method: 'get',
urlPattern: '/pokemon/ditto',
},
{
method: 'post',
urlPattern: '/pokemon/ditto?format=true',
},
],
}),
});
const method = 'POST';
const url = '/pokemon/ditto';
// When
const response = await useCase.execute(method, url, {}, '');
//Then
expect(requestRepository.persistResponseForRequest).not.toHaveBeenCalled();
expect(requestRepository.getResponseByRequestId).not.toHaveBeenCalled();
expect(response).toEqual(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
it('should not cache the response (glob style)', async () => {
// Given
const useCase = new RespondToRequest({
networkService,
requestRepository,
config: getTestConfiguration({
disableCachingPatterns: [
{
method: 'post',
urlPattern: '/pokemon/ditto*',
},
],
}),
});
const method = 'POST';
const url = '/pokemon/ditto?format=true';
// When
const response = await useCase.execute(method, url, {}, '');
//Then
expect(requestRepository.persistResponseForRequest).not.toHaveBeenCalled();
expect(requestRepository.getResponseByRequestId).not.toHaveBeenCalled();
expect(response).toEqual(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
it('should not cache the response (glob style)', async () => {
// Given
const useCase = new RespondToRequest({
networkService,
requestRepository,
config: getTestConfiguration({
disableCachingPatterns: [
{
method: 'get',
urlPattern: '/pokemon/mew',
},
],
}),
});
const method = 'GET';
const url = '/pokemon/mew/';
// When
const response = await useCase.execute(method, url, {}, '');
//Then
expect(requestRepository.persistResponseForRequest).not.toHaveBeenCalled();
expect(requestRepository.getResponseByRequestId).not.toHaveBeenCalled();
expect(response).toEqual(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
it('should not cache the response (nested route)', async () => {
// Given
const useCase = new RespondToRequest({
networkService,
requestRepository,
config: getTestConfiguration({
disableCachingPatterns: [
{
method: 'get',
urlPattern: '/pokemon/mew/**/*',
},
],
}),
});
const method = 'GET';
const url = '/pokemon/mew/abilities/2/stats';
// When
const response = await useCase.execute(method, url, {}, '');
//Then
expect(requestRepository.persistResponseForRequest).not.toHaveBeenCalled();
expect(requestRepository.getResponseByRequestId).not.toHaveBeenCalled();
expect(response).toEqual(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
it('should not cache the response (nested route)', async () => {
// Given
const useCase = new RespondToRequest({
networkService,
requestRepository,
config: getTestConfiguration({
disableCachingPatterns: [
{
method: 'get',
urlPattern: '/pokemon/*/sprites/**',
},
{
method: 'post',
urlPattern: '/pokemon/*/sprites/**',
},
],
}),
});
const method = 'GET';
const url = '/pokemon/mew/sprites/2/back';
// When
const response = await useCase.execute(method, url, {}, '');
//Then
expect(requestRepository.persistResponseForRequest).not.toHaveBeenCalled();
expect(requestRepository.getResponseByRequestId).not.toHaveBeenCalled();
expect(response).toEqual(
new Response(
200,
{ 'cache-control': 'something' },
Buffer.from('some body'),
66
)
);
});
}); | the_stack |
import React, { CSSProperties, Component, createRef } from 'react';
import { on, off } from './utils/events';
import { getScrollWidth } from './utils/getScrollWidth';
import { clearSelection } from './utils/clearSelection';
import { stylesFactory } from './styles';
import { scrollTo } from './modules/scrollTo';
import { mouseWithoutWindow } from './modules/mouse.without.window';
import { generateStyle } from './utils/generateStyle';
import { generateID } from './utils/generateID';
import {
isObject,
isDefined,
isClient,
isFunction
} from './utils/is';
import { State } from './types/state';
import { Props } from './types/props';
// Install the global listener that detects the mouse leaving the window
// (it dispatches the synthetic 'mouseWithoutWindow' event the component
// subscribes to in componentDidMount).
mouseWithoutWindow();
/**
 * Minimum height for the scroll bar thumb.
 * Even when the content is very tall, the bar never
 * shrinks below 20 pixels.
 * */
const minHeightScrollBar = 20;
// Fallback native scrollbar width, used on browsers that report 0 (e.g. Safari/iOS).
const defaultScrollWidth = 17;
// Interval, in milliseconds, at which scroll bar geometry is re-measured.
const REINIT_MS = 250;
const SCROLL_WIDTH = getScrollWidth();
// If this is Safari / iPhone / iPad or other browser / device with scrollWidth === 0
const isZero = SCROLL_WIDTH === 0;
// Returns the measured native scrollbar width, or 0 when measurement failed
// (non-numeric result, e.g. during server-side rendering).
const getDefaultScrollWidth = (): number => (
  typeof SCROLL_WIDTH === 'number' ?
    SCROLL_WIDTH :
    0
);
class CustomScroll extends Component<Props, State> {
  // Heights measured on the previous reinit() tick; used to detect content resizes.
  private nextWrapperHeight = 0;
  private nextHolderHeight = 0;
  // Id of the per-instance <style> tag that hides the native scrollbar.
  private scrollID: string = generateID();
  private readonly isVirtualized: boolean;
  private readonly interval;
  private scrollRun: (e) => void;
  private scrollBlock: HTMLDivElement;
  private customScroll: HTMLDivElement;
  private customScrollHolder: HTMLDivElement;
  private endScroll: () => void;
  private customScrollHolderRef: {
    current: HTMLDivElement;
  };
  private customScrollRef: {
    current: HTMLDivElement;
  };
  private customScrollFrameRef: {
    current: HTMLDivElement;
  };
  private scrollBarRef: {
    current: HTMLDivElement;
  };
  constructor(props) {
    super(props);
    // Refs are created dynamically: e.g. 'scroll-area' -> this['scroll-areaRef'].
    ['scroll-area', 'scroll-area-holder', 'scrollBar', 'customScroll', 'customScrollHolder', 'customScrollFrame'].forEach(r => {
      this[`${r}Ref`] = createRef();
    });
    let scrollWidth = getDefaultScrollWidth();
    this.isVirtualized = isObject(props.virtualized);
    if (isZero) {
      // Browsers with a 0-width native scrollbar still need an offset to hide it.
      scrollWidth = defaultScrollWidth;
    }
    const className = isDefined(props.className) ? props.className : 'react-customscroll';
    /**
     * Reinitialize the scroll bar every REINIT_MS (250) milliseconds
     * */
    this.interval = setInterval(this.reinit, REINIT_MS);
    this.state = {
      scrollTop: 0,
      width: `calc(100% + ${scrollWidth}px)`,
      selection: true,
      scrollAreaShow: false,
      // BUG FIX: was `props.animate || true`, which is always true and
      // silently ignored an explicit `animate={false}`. Default to true
      // only when the prop is not provided.
      animate: isDefined(props.animate) ? props.animate : true,
      classes: {
        base: className,
        holder: `${className}-holder`,
        frame: `${className}-frame`,
        area: `${className}-scrollbar-area`,
        'area-holder': `${className}-scrollbar-holder`,
        'scroll-bar': `${className}-scrollbar`,
      },
      virtualState: this.isVirtualized ? this.getScrollBarStyles(props.scrollTo || 0) : null,
      styles: {
        scrollArea: {},
        scrollAreaFrame: {},
        scrollBar: {},
        ctmScroll: {},
        ctmScrollActive: {},
        ctmScrollFrame: {},
        noselect: {},
        ctmScrollHolder: {}
      }
    };
    // Inject per-instance CSS that hides the webkit scrollbar of the holder.
    if (isClient() && !document.getElementById(this.scrollID)) {
      generateStyle(`#${this.scrollID}::-webkit-scrollbar { opacity: 0 }
#${this.scrollID}::-webkit-scrollbar-track-piece { background-color: transparent }`, this.scrollID);
    }
  }
  componentDidMount(): void {
    /**
     * If the mouse cursor goes outside the window
     * the 'mouseWithoutWindow' event is triggered,
     * all drag listeners are removed and the content
     * in the scroll block becomes selectable again.
     * */
    on(document, ['mouseWithoutWindow'], this.reset);
    on(window, ['resize'], this.restScrollAfterResize);
    this.scrollBlock = this.customScrollHolderRef.current;
    this.customScroll = this.customScrollRef.current;
    this.customScrollHolder = this.customScrollFrameRef.current;
    this.applyStyles();
  }
  componentDidUpdate(prevProps): void {
    let offsetY = this.props.scrollTo;
    // eslint-disable-next-line sonarjs/no-collapsible-if,no-restricted-globals
    if (isDefined(offsetY) && !isNaN(offsetY)) {
      if (prevProps.scrollTo !== offsetY) {
        if (this.isVirtualized) {
          offsetY = offsetY || 0;
          // Defer so the virtualized parent finishes its own update first.
          setTimeout(() => {
            this.setState({
              virtualState: this.getScrollBarStyles(offsetY)
            });
          });
        } else {
          scrollTo(this.scrollBlock, offsetY, this.state.animate);
        }
      }
    }
  }
  componentWillUnmount(): void {
    if (isClient()) {
      const el = document.getElementById(this.scrollID);
      // Guard parentNode as well: the style node may already be detached.
      if (el && el.parentNode) {
        el.parentNode.removeChild(el);
      }
    }
    clearInterval(this.interval);
    this.removeListeners();
  }
  // Starts a drag of the scroll bar thumb (mouse or touch).
  onClick = (evt): void => {
    evt.stopPropagation();
    evt.preventDefault();
    /**
     * If we clicked the right mouse button we must skip this event
     * */
    let isRightMB;
    if ('which' in evt) {
      isRightMB = evt.which === 3;
    } else if ('button' in evt) {
      isRightMB = evt.button === 2;
    }
    if (isRightMB) {
      setTimeout(this.reset);
      return;
    }
    const elem = this.scrollBlock;
    const startPoint = evt.touches ? evt.touches[0].pageY : evt.pageY;
    const scrollTopOffset = this.isVirtualized ? (this.props.scrollTo || 0) : elem.scrollTop;
    // Disable text selection while the thumb is being dragged.
    this.blockSelection(false);
    this.scrollRun = (e) => {
      e.stopPropagation();
      e.preventDefault();
      const { holderHeight, wrapperHeight } = this.getParams();
      // Ratio converting thumb movement (px) into content movement (px).
      const diff = holderHeight / wrapperHeight;
      const pageY = e.touches ? e.touches[0].pageY : e.pageY;
      if (this.isVirtualized) {
        let scrollTop = ((pageY - startPoint) * diff) + scrollTopOffset;
        scrollTop = holderHeight - wrapperHeight <= scrollTop ? holderHeight - wrapperHeight : scrollTop;
        if (isFunction(this.props.scrollSync)) {
          this.props.scrollSync(scrollTop);
        }
      } else {
        scrollTo(elem, ((pageY - startPoint) * diff) + scrollTopOffset);
      }
    };
    this.endScroll = () => {
      this.reset();
    };
    on(document, ['mouseup', 'touchend'], this.endScroll);
    on(document, ['mousemove', 'touchmove'], this.scrollRun);
  };
  // Measures the wrapper (viewport) and holder (content) heights and derives
  // the thumb height plus the viewport/content ratio. Returns zeros during SSR.
  getParams(): { wrapperHeight: number; holderHeight: number; percentDiff: number; height: number } {
    let wrapperHeight = 0;
    let holderHeight = 0;
    let percentDiff = 0;
    let height = 0;
    if (!isClient()) {
      return {
        wrapperHeight,
        holderHeight,
        percentDiff,
        height
      };
    }
    const scrollArea = this['scroll-areaRef'].current;
    const paddings = window && scrollArea ?
      parseFloat(window.getComputedStyle(scrollArea, null)
        .getPropertyValue('padding-top')) +
      parseFloat(window.getComputedStyle(scrollArea, null)
        .getPropertyValue('padding-bottom')) :
      0;
    if (this.isVirtualized) {
      wrapperHeight = this.props.virtualized.height || 0;
      holderHeight = this.props.virtualized.scrollHeight || 0;
    } else {
      wrapperHeight = this.customScroll && this.customScroll.offsetHeight;
      // BUG FIX: the guard previously checked `this.customScroll` while
      // dereferencing `this.customScrollHolder`; guard the element that is
      // actually read.
      holderHeight = this.customScrollHolder && this.customScrollHolder.offsetHeight;
    }
    if (holderHeight === 0) {
      height = 0;
      percentDiff = 0;
    } else {
      percentDiff = (wrapperHeight - paddings) / holderHeight;
      height = wrapperHeight * percentDiff;
    }
    return {
      wrapperHeight: Math.ceil(wrapperHeight),
      holderHeight,
      percentDiff,
      height
    };
  }
  // Computes the thumb's top offset and height for the given scroll offset,
  // enforcing the minimum thumb height.
  getScrollBarStyles(offsetY = 0): { top: number; height: number } {
    const { holderHeight, percentDiff, height } = this.getParams();
    if (holderHeight === 0 && percentDiff === 0 && height === 0) {
      return {
        top: 0,
        height: 0
      };
    }
    const scrollTop = this.isVirtualized ? offsetY : this.state.scrollTop || this.scrollBlock.scrollTop;
    // When the thumb is clamped to the minimum height, shrink the ratio so
    // the thumb still reaches the bottom at maximum scroll.
    const newPercentDiff = height < minHeightScrollBar ?
      percentDiff - ((minHeightScrollBar - height) / holderHeight) :
      percentDiff;
    const scrollBarHeight = height < minHeightScrollBar ? minHeightScrollBar : height;
    return {
      top: scrollTop * newPercentDiff,
      height: scrollBarHeight
    };
  }
  // Programmatically scrolls the content to the given offset.
  setY(value): void {
    scrollTo(this.scrollBlock, value, this.state.animate);
  }
  // Ends any drag in progress and re-enables text selection.
  reset = (): void => {
    this.removeListeners();
    this.blockSelection(true);
  };
  // Invalidates cached measurements so the next reinit() tick re-renders.
  restScrollAfterResize = (): void => {
    this.nextWrapperHeight = 0;
    this.nextHolderHeight = 0;
  };
  // Click on the track (outside the thumb): jump one viewport up or down.
  jump = (e): void => {
    const y = e.touches ? e.touches[0].pageY : e.pageY;
    let scrollBar = this.scrollBarRef.current as { offsetTop: number; offsetHeight: number };
    let scrollPosition = this.scrollBlock.scrollTop;
    const { wrapperHeight } = this.getParams();
    const topOffset = this.scrollBlock.getBoundingClientRect().top;
    if (this.isVirtualized) {
      scrollPosition = this.props.scrollTo || 0;
      scrollBar = {
        offsetTop: this.state.virtualState.top,
        offsetHeight: this.state.virtualState.height
      };
    }
    if (y < (topOffset + scrollBar.offsetTop) ||
      y > (topOffset + scrollBar.offsetTop + scrollBar.offsetHeight)) {
      const offset = topOffset + scrollBar.offsetTop <= y ? 1 : -1;
      const scrollY = (scrollPosition + (wrapperHeight * offset));
      if (this.isVirtualized) {
        if (isFunction(this.props.scrollSync)) {
          this.props.scrollSync(scrollY);
        }
      } else {
        scrollTo(this.scrollBlock, scrollY);
      }
    }
  };
  // Periodic re-measurement: toggles scroll bar visibility when the content
  // or viewport size changed since the last tick.
  reinit = (): void => {
    const { wrapperHeight, holderHeight } = this.getParams();
    if ((wrapperHeight !== this.nextWrapperHeight) ||
      (holderHeight !== this.nextHolderHeight)) {
      if (this.isVirtualized) {
        const scrollPosition = this.props.scrollTo || 0;
        const virtualState = this.getScrollBarStyles(scrollPosition);
        this.setState({
          virtualState,
          scrollAreaShow: holderHeight > wrapperHeight
        });
      } else {
        this.setState({
          scrollAreaShow: holderHeight > wrapperHeight
        });
      }
    }
    this.nextWrapperHeight = wrapperHeight;
    this.nextHolderHeight = holderHeight;
  };
  // Native scroll event of the holder — mirror the offset into state.
  scroll = (): void => {
    this.setState({
      scrollTop: this.scrollBlock.scrollTop
    });
  };
  removeListeners(): void {
    off(document, ['mouseWithoutWindow'], this.reset);
    off(window, ['resize'], this.restScrollAfterResize);
    off(document, ['mouseup', 'touchend'], this.endScroll);
    off(document, ['mousemove', 'touchmove'], this.scrollRun);
  }
  // state=false: clear and block text selection (drag start); state=true: allow it.
  blockSelection(state): void {
    if (!state) {
      clearSelection();
    }
    this.setState({ selection: !!state });
  }
  // Builds the style objects from props once the DOM refs are attached.
  applyStyles(): void {
    const scrollWidth = getDefaultScrollWidth();
    this.setState(state => (
      Object.assign(state, {
        styles: stylesFactory({
          virtualized: this.isVirtualized,
          isZero,
          originalScrollWidth: scrollWidth,
          scrollWidth: isDefined(this.props.scrollWidth) ? this.props.scrollWidth : '6px',
          scrollAreaColor: isDefined(this.props.scrollAreaColor) ? this.props.scrollAreaColor : '#494949',
          scrollBarRadius: isDefined(this.props.scrollBarRadius) ? this.props.scrollBarRadius : '6px',
          scrollBarColor: isDefined(this.props.scrollBarColor) ? this.props.scrollBarColor : '#aeaeae'
        }, !!this.props.rtl)
      })
    ));
  }
  render(): JSX.Element {
    const ctmScroll = !this.state.selection ?
      Object.assign({}, this.state.styles.ctmScroll, this.state.styles.noselect) :
      this.state.styles.ctmScroll;
    const ctmScrollFrame = this.state.scrollAreaShow ?
      Object.assign({}, this.state.styles.ctmScrollFrame, this.state.styles.ctmScrollActive) :
      this.state.styles.ctmScrollFrame;
    return (
      <div
        ref={this.customScrollRef}
        style={Object.assign({ boxSizing: 'border-box' }, ctmScroll) as CSSProperties}
        className={this.state.classes.base}
        suppressHydrationWarning
      >
        <div
          ref={this.customScrollHolderRef}
          style={Object.assign({ boxSizing: 'border-box' }, { width: this.state.width }, this.state.styles.ctmScrollHolder) as CSSProperties}
          onScroll={this.scroll}
          className={this.state.classes.holder}
          id={this.scrollID}
          suppressHydrationWarning
        >
          <div
            ref={this.customScrollFrameRef}
            style={Object.assign({ boxSizing: 'border-box' }, ctmScrollFrame, isZero ?
              { width: '100%' } : {}) as CSSProperties}
            className={this.state.classes.frame}
            suppressHydrationWarning
          >
            {isFunction(this.props.children) ?
              this.props.children(this.scrollBlock && this.scrollBlock.scrollTop ?
                this.scrollBlock.scrollTop :
                0) :
              this.props.children}
          </div>
          {this.state.scrollAreaShow ? (
            <div
              ref={this['scroll-areaRef']}
              style={this.state.styles.scrollArea}
              onClick={this.jump}
              className={this.state.classes.area}
            >
              <div
                ref={this['scroll-area-holderRef']}
                style={this.state.styles.scrollAreaFrame}
                className={this.state.classes['area-holder']}
              >
                <div
                  ref={this.scrollBarRef}
                  style={
                    Object.assign({}, this.state.styles.scrollBar, this.isVirtualized ?
                      this.state.virtualState :
                      this.getScrollBarStyles())
                  }
                  onMouseDown={this.onClick}
                  onTouchStart={this.onClick}
                  className={this.state.classes['scroll-bar']}
                />
              </div>
            </div>
          ) : null}
        </div>
      </div>
    );
  }
}
export { CustomScroll, getDefaultScrollWidth }; | the_stack |
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
import * as msRest from "@azure/ms-rest-js";
export { BaseResource, CloudError };
/**
 * @interface
 * An interface representing ErrorResponse.
 * The error response sent when an operation fails.
 *
 */
export interface ErrorResponse {
  /**
   * @member {string} code The error code.
   */
  code: string;
  /**
   * @member {string} message The error message.
   */
  message: string;
}
/**
 * @interface
 * An interface representing Resource.
 * An Azure resource.
 *
 * @extends BaseResource
 */
export interface Resource extends BaseResource {
  /**
   * @member {string} [id] The resource ID.
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly id?: string;
  /**
   * @member {string} [name] The name of the resource.
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly name?: string;
  /**
   * @member {string} [type] The type of the resource.
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly type?: string;
  /**
   * @member {string} location The location of the resource. This cannot be
   * changed after the resource is created.
   */
  location: string;
  /**
   * @member {{ [propertyName: string]: string }} [tags] The tags of the
   * resource.
   */
  tags?: { [propertyName: string]: string };
}
/**
 * @interface
 * An interface representing Workspace.
 * An object that represents a machine learning workspace.
 *
 * @extends Resource
 */
export interface Workspace extends Resource {
  /**
   * @member {string} userStorageAccountId The fully qualified ARM id of the
   * storage account associated with this workspace.
   */
  userStorageAccountId: string;
  /**
   * @member {string} [ownerEmail] The email id of the owner for this
   * workspace.
   */
  ownerEmail?: string;
  /**
   * @member {WorkspaceType} [workspaceType] The type of this workspace.
   * Possible values include: 'Production', 'Free', 'Anonymous',
   * 'PaidStandard', 'PaidPremium'
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly workspaceType?: WorkspaceType;
  /**
   * @member {WorkspaceState} [workspaceState] The current state of workspace
   * resource. Possible values include: 'Deleted', 'Enabled', 'Disabled',
   * 'Migrated', 'Updated', 'Registered', 'Unregistered'
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly workspaceState?: WorkspaceState;
  /**
   * @member {string} [workspaceId] The immutable id associated with this
   * workspace.
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly workspaceId?: string;
  /**
   * @member {string} [creationTime] The creation time for this workspace
   * resource, as a string.
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly creationTime?: string;
  /**
   * @member {string} [studioEndpoint] The regional endpoint for the machine
   * learning studio service which hosts this workspace.
   * **NOTE: This property will not be serialized. It can only be populated by
   * the server.**
   */
  readonly studioEndpoint?: string;
  /**
   * @member {string} [keyVaultIdentifierId] The key vault identifier used for
   * encrypted workspaces.
   */
  keyVaultIdentifierId?: string;
}
/**
 * @interface
 * An interface representing WorkspaceUpdateParameters.
 * The parameters for updating a machine learning workspace. All fields are
 * optional; only the provided values are changed.
 *
 */
export interface WorkspaceUpdateParameters {
  /**
   * @member {{ [propertyName: string]: string }} [tags] The resource tags for
   * the machine learning workspace.
   */
  tags?: { [propertyName: string]: string };
  /**
   * @member {WorkspaceState} [workspaceState] The current state of workspace
   * resource. Possible values include: 'Deleted', 'Enabled', 'Disabled',
   * 'Migrated', 'Updated', 'Registered', 'Unregistered'
   */
  workspaceState?: WorkspaceState;
  /**
   * @member {string} [keyVaultIdentifierId] The key vault identifier used for
   * encrypted workspaces.
   */
  keyVaultIdentifierId?: string;
}
/**
 * @interface
 * An interface representing WorkspaceKeysResponse.
 * Workspace authorization keys for a workspace.
 *
 */
export interface WorkspaceKeysResponse {
  /**
   * @member {string} [primaryToken] Primary authorization key for this
   * workspace.
   */
  primaryToken?: string;
  /**
   * @member {string} [secondaryToken] Secondary authorization key for this
   * workspace.
   */
  secondaryToken?: string;
}
/**
 * @interface
 * An interface representing OperationDisplay.
 * Display information for an operation.
 *
 */
export interface OperationDisplay {
  /**
   * @member {string} [provider] The resource provider name:
   * Microsoft.MachineLearning
   */
  provider?: string;
  /**
   * @member {string} [resource] The resource on which the operation is
   * performed.
   */
  resource?: string;
  /**
   * @member {string} [operation] The operation that users can perform.
   */
  operation?: string;
  /**
   * @member {string} [description] The description for the operation.
   */
  description?: string;
}
/**
 * @interface
 * An interface representing Operation.
 * An Azure Machine Learning Studio REST API operation.
 *
 */
export interface Operation {
  /**
   * @member {string} [name] Operation name: {provider}/{resource}/{operation}
   */
  name?: string;
  /**
   * @member {OperationDisplay} [display] Display information for the operation.
   */
  display?: OperationDisplay;
}
/**
 * @interface
 * An interface representing MachineLearningWorkspacesManagementClientOptions.
 * @extends AzureServiceClientOptions
 */
export interface MachineLearningWorkspacesManagementClientOptions extends AzureServiceClientOptions {
  /**
   * @member {string} [baseUri] Overrides the default service endpoint.
   */
  baseUri?: string;
}
/**
 * @interface
 * An interface representing the OperationListResult.
 * An array of operations supported by the resource provider.
 *
 * @extends Array<Operation>
 */
export interface OperationListResult extends Array<Operation> {
}
/**
 * @interface
 * An interface representing the WorkspaceListResult.
 * The result of a request to list machine learning workspaces.
 * (The previous description, "workspace keys", was a copy-paste error:
 * this type holds Workspace entries plus a paging link.)
 *
 * @extends Array<Workspace>
 */
export interface WorkspaceListResult extends Array<Workspace> {
  /**
   * @member {string} [nextLink] The URI that can be used to request the next
   * list of machine learning workspaces.
   */
  nextLink?: string;
}
/**
 * Defines values for WorkspaceType.
 * Possible values include: 'Production', 'Free', 'Anonymous', 'PaidStandard', 'PaidPremium'
 * @readonly
 * @enum {string}
 */
export type WorkspaceType = 'Production' | 'Free' | 'Anonymous' | 'PaidStandard' | 'PaidPremium';
/**
 * Defines values for WorkspaceState.
 * Possible values include: 'Deleted', 'Enabled', 'Disabled', 'Migrated', 'Updated', 'Registered',
 * 'Unregistered'
 * @readonly
 * @enum {string}
 */
export type WorkspaceState = 'Deleted' | 'Enabled' | 'Disabled' | 'Migrated' | 'Updated' | 'Registered' | 'Unregistered';
/**
 * Contains response data for the Operations list operation.
 */
export type OperationsListResponse = OperationListResult & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: OperationListResult;
  };
};
/**
 * Contains response data for the Workspaces get operation.
 */
export type WorkspacesGetResponse = Workspace & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: Workspace;
  };
};
/**
 * Contains response data for the Workspaces createOrUpdate operation.
 */
export type WorkspacesCreateOrUpdateResponse = Workspace & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: Workspace;
  };
};
/**
 * Contains response data for the Workspaces update operation.
 */
export type WorkspacesUpdateResponse = Workspace & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: Workspace;
  };
};
/**
 * Contains response data for the Workspaces listWorkspaceKeys operation.
 */
export type WorkspacesListWorkspaceKeysResponse = WorkspaceKeysResponse & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: WorkspaceKeysResponse;
  };
};
/**
 * Contains response data for the Workspaces listByResourceGroup operation.
 */
export type WorkspacesListByResourceGroupResponse = WorkspaceListResult & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: WorkspaceListResult;
  };
};
/**
 * Contains response data for the Workspaces list operation.
 */
export type WorkspacesListResponse = WorkspaceListResult & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: WorkspaceListResult;
  };
};
/**
 * Contains response data for the Workspaces listByResourceGroupNext operation.
 */
export type WorkspacesListByResourceGroupNextResponse = WorkspaceListResult & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: WorkspaceListResult;
  };
};
/**
 * Contains response data for the Workspaces listNext operation.
 */
export type WorkspacesListNextResponse = WorkspaceListResult & {
  /**
   * The underlying HTTP response.
   */
  _response: msRest.HttpResponse & {
    /**
     * The response body as text (string format)
     */
    bodyAsText: string;
    /**
     * The response body as parsed JSON or XML
     */
    parsedBody: WorkspaceListResult;
  };
};
import BigNumber from 'bignumber.js'
import { WyvernProtocol } from 'wyvern-js'
import * as ethUtil from 'ethereumjs-util'
import * as _ from 'lodash'
import * as Web3 from 'web3'
import {
AnnotatedFunctionABI,
FunctionInputKind,
FunctionOutputKind,
Schema,
StateMutability
} from 'wyvern-schemas/dist/types'
import { ERC1155 } from '../contracts'
import { OpenSeaPort } from '..'
import {
Asset,
AssetContractType,
AssetEvent,
ECSignature,
OpenSeaAccount,
OpenSeaAsset,
OpenSeaAssetBundle,
OpenSeaAssetContract,
OpenSeaCollection,
OpenSeaFungibleToken,
OpenSeaTraitStats, OpenSeaUser,
Order,
OrderJSON,
OrderSide,
SaleKind,
Transaction,
TxnCallback,
UnhashedOrder,
UnsignedOrder,
Web3Callback,
WyvernAsset,
WyvernBundle,
WyvernFTAsset,
WyvernNFTAsset,
WyvernSchemaName
} from '../types'
import {
ENJIN_ADDRESS,
ENJIN_COIN_ADDRESS,
INVERSE_BASIS_POINT,
NULL_ADDRESS,
NULL_BLOCK_HASH
} from '../constants'
import { proxyABI } from '../abi/Proxy'
export {
WyvernProtocol
}
// Builds an annotated ABI entry describing a transfer of an NFT asset.
// NOTE(review): this targets a `transfer(_to, _tokenId)` signature, which is
// the legacy pre-ERC-721 style rather than the standard `transferFrom` —
// presumably intentional for legacy contracts; confirm against callers.
export const annotateERC721TransferABI = (asset: WyvernNFTAsset): AnnotatedFunctionABI => ({
  "constant": false,
  "inputs": [
    {
      // Recipient address — substituted by the protocol at match time.
      "name": "_to",
      "type": "address",
      "kind": FunctionInputKind.Replaceable
    },
    {
      // Token id — fixed to the specific asset being transferred.
      "name": "_tokenId",
      "type": "uint256",
      "kind": FunctionInputKind.Asset,
      "value": asset.id
    }
  ],
  "target": asset.address,
  "name": "transfer",
  "outputs": [],
  "payable": false,
  "stateMutability": StateMutability.Nonpayable,
  "type": Web3.AbiType.Function
})
// Builds an annotated ABI entry describing an ERC-20 `transfer(_to, _amount)`
// call for the given fungible asset and quantity.
export const annotateERC20TransferABI = (asset: WyvernFTAsset): AnnotatedFunctionABI => ({
  "constant": false,
  "inputs": [
    {
      // Recipient address — substituted by the protocol at match time.
      "name": "_to",
      "type": "address",
      "kind": FunctionInputKind.Replaceable
    },
    {
      // Amount of tokens to move — fixed to the asset's quantity.
      "name": "_amount",
      "type": "uint256",
      "kind": FunctionInputKind.Count,
      "value": asset.quantity
    }
  ],
  "target": asset.address,
  "name": "transfer",
  "outputs": [
    {
      "name": "success",
      "type": "bool",
      "kind": FunctionOutputKind.Other
    }
  ],
  "payable": false,
  "stateMutability": StateMutability.Nonpayable,
  "type": Web3.AbiType.Function
})
// Maps each Wyvern schema name to the OpenSea contract type it represents.
const SCHEMA_NAME_TO_ASSET_CONTRACT_TYPE: {[key in WyvernSchemaName]: AssetContractType} = {
  [WyvernSchemaName.ERC721]: AssetContractType.NonFungible,
  [WyvernSchemaName.ERC1155]: AssetContractType.SemiFungible,
  [WyvernSchemaName.ERC20]: AssetContractType.Fungible,
  [WyvernSchemaName.LegacyEnjin]: AssetContractType.SemiFungible,
  [WyvernSchemaName.ENSShortNameAuction]: AssetContractType.NonFungible,
}
// OTHER
// Pending-transaction listeners keyed by tx hash; consumed by track() below.
const txCallbacks: {[key: string]: TxnCallback[]} = {}
/**
 * Promisify a callback-syntax web3 function
 * @param inner callback function that accepts a Web3 callback function and passes
 * it to the Web3 function
 */
async function promisify<T>(
  inner: (fn: Web3Callback<T>) => void
) {
  return new Promise<T>((resolve, reject) =>
    inner((err, res) => {
      // Settle exactly once. The original called resolve(res) even after
      // reject(err) — a harmless no-op on a settled promise, but misleading.
      if (err) {
        reject(err)
      } else {
        resolve(res)
      }
    })
  )
}
/**
 * Promisify a call a method on a contract,
 * handling Parity errors. Returns '0x' if error.
 * Note that if T is not "string", this may return a falsey
 * value when the contract doesn't support the method (e.g. `isApprovedForAll`).
 * @param callback An anonymous function that takes a web3 callback
 * and returns a Web3 Contract's call result, e.g. `c => erc721.ownerOf(3, c)`
 * @param onError callback when user denies transaction
 */
export async function promisifyCall<T>(
  callback: (fn: Web3Callback<T>) => void,
  onError?: (error: Error) => void
): Promise<T | undefined> {
  let value: any
  try {
    value = await promisify<T>(callback)
  } catch (error) {
    // Probably method not found, and web3 is a Parity node
    if (onError) {
      onError(error)
    } else {
      console.error(error)
    }
    return undefined
  }
  // Geth compatibility: '0x' signals the method isn't supported
  return value == '0x' ? undefined : (value as T)
}
// Registers a finalization callback for the given tx hash. The first caller
// for a hash starts a 1-second polling loop against the node; once the tx is
// mined every registered callback receives the success flag.
const track = (web3: Web3, txHash: string, onFinalized: TxnCallback) => {
  if (txCallbacks[txHash]) {
    // A polling loop is already running for this hash — just queue the callback.
    txCallbacks[txHash].push(onFinalized)
  } else {
    txCallbacks[txHash] = [onFinalized]
    const poll = async () => {
      const tx = await promisify<Web3.Transaction>(c => web3.eth.getTransaction(txHash, c))
      if (tx && tx.blockHash && tx.blockHash !== NULL_BLOCK_HASH) {
        const receipt = await promisify<Web3.TransactionReceipt | null>(c => web3.eth.getTransactionReceipt(txHash, c))
        if (!receipt) {
          // Hack: assume success if no receipt
          console.warn('No receipt found for ', txHash)
        }
        const status = receipt
          ? parseInt((receipt.status || "0").toString()) == 1
          : true
        txCallbacks[txHash].map(f => f(status))
        delete txCallbacks[txHash]
      } else {
        setTimeout(poll, 1000)
      }
    }
    // BUG FIX: `.catch()` with no handler does not handle the rejection at
    // all — a failed poll surfaced as an unhandled promise rejection. Log it.
    poll().catch(error => console.error(error))
  }
}
// Resolves once the given transaction is mined successfully; rejects with a
// descriptive error if the transaction fails on-chain.
export const confirmTransaction = async (web3: Web3, txHash: string) => {
  return new Promise((resolve, reject) => {
    const onFinalized = (didSucceed: boolean) => {
      if (!didSucceed) {
        reject(new Error(`Transaction failed :( You might have already completed this action. See more on the mainnet at etherscan.io/tx/${txHash}`))
        return
      }
      resolve("Transaction complete!")
    }
    track(web3, txHash, onFinalized)
  })
}
// Maps a raw OpenSea API asset payload onto the typed OpenSeaAsset shape.
export const assetFromJSON = (asset: any): OpenSeaAsset => {
  // GIF/SVG images would lose animation / vector quality if a static preview
  // were substituted, so the original URL is preferred for them.
  const isAnimated = asset.image_url && asset.image_url.endsWith('.gif')
  const isSvg = asset.image_url && asset.image_url.endsWith('.svg')
  const fromJSON: OpenSeaAsset = {
    tokenId: asset.token_id.toString(),
    tokenAddress: asset.asset_contract.address,
    name: asset.name,
    description: asset.description,
    owner: asset.owner,
    assetContract: assetContractFromJSON(asset.asset_contract),
    collection: collectionFromJSON(asset.collection),
    orders: asset.orders ? asset.orders.map(orderFromJSON) : null,
    sellOrders: asset.sell_orders ? asset.sell_orders.map(orderFromJSON) : null,
    buyOrders: asset.buy_orders ? asset.buy_orders.map(orderFromJSON) : null,
    isPresale: asset.is_presale,
    // Don't use previews if it's a special image
    imageUrl: isAnimated || isSvg
      ? asset.image_url
      : (asset.image_preview_url || asset.image_url),
    imagePreviewUrl: asset.image_preview_url,
    imageUrlOriginal: asset.image_original_url,
    imageUrlThumbnail: asset.image_thumbnail_url,
    externalLink: asset.external_link,
    openseaLink: asset.permalink,
    traits: asset.traits,
    numSales: asset.num_sales,
    lastSale: asset.last_sale ? assetEventFromJSON(asset.last_sale) : null,
    backgroundColor: asset.background_color ? `#${asset.background_color}` : null,
    transferFee: asset.transfer_fee
      ? makeBigNumber(asset.transfer_fee)
      : null,
    transferFeePaymentToken: asset.transfer_fee_payment_token
      ? tokenFromJSON(asset.transfer_fee_payment_token)
      : null,
  }
  // If orders were included, put them in sell/buy order groups
  if (fromJSON.orders && !fromJSON.sellOrders) {
    fromJSON.sellOrders = fromJSON.orders.filter(o => o.side == OrderSide.Sell)
  }
  if (fromJSON.orders && !fromJSON.buyOrders) {
    fromJSON.buyOrders = fromJSON.orders.filter(o => o.side == OrderSide.Buy)
  }
  return fromJSON
}
// Maps a raw OpenSea API event payload onto the typed AssetEvent shape.
export const assetEventFromJSON = (assetEvent: any): AssetEvent => {
  const { transaction, payment_token } = assetEvent
  return {
    eventType: assetEvent.event_type,
    eventTimestamp: assetEvent.event_timestamp,
    auctionType: assetEvent.auction_type,
    totalPrice: assetEvent.total_price,
    transaction: transaction ? transactionFromJSON(transaction) : null,
    paymentToken: payment_token ? tokenFromJSON(payment_token) : null,
  }
}
// Maps a raw OpenSea API transaction payload onto the typed Transaction shape.
export const transactionFromJSON = (transaction: any): Transaction => {
  // API timestamps carry no timezone suffix; append 'Z' to parse them as UTC.
  const asUtcDate = (value: any) => new Date(`${value}Z`)
  return {
    fromAccount: accountFromJSON(transaction.from_account),
    toAccount: accountFromJSON(transaction.to_account),
    createdDate: asUtcDate(transaction.created_date),
    modifiedDate: asUtcDate(transaction.modified_date),
    transactionHash: transaction.transaction_hash,
    transactionIndex: transaction.transaction_index,
    blockNumber: transaction.block_number,
    blockHash: transaction.block_hash,
    timestamp: asUtcDate(transaction.timestamp),
  }
}
// Maps a raw OpenSea API account payload onto the typed OpenSeaAccount shape.
export const accountFromJSON = (account: any): OpenSeaAccount => {
  const { address, config, profile_img_url, user } = account
  return {
    address,
    config,
    profileImgUrl: profile_img_url,
    user: user ? userFromJSON(user) : null
  }
}
/** Deserialize an OpenSea API user payload, keeping only the username. */
export const userFromJSON = (user: any): OpenSeaUser => ({
  username: user.username,
})
/**
 * Deserialize an OpenSea API asset-bundle payload into an `OpenSeaAssetBundle`.
 * Every contained asset is deserialized; the shared asset contract and any
 * attached sell orders are converted when present.
 */
export const assetBundleFromJSON = (asset_bundle: any): OpenSeaAssetBundle => {
  const assetContract = asset_bundle.asset_contract
    ? assetContractFromJSON(asset_bundle.asset_contract)
    : undefined
  const sellOrders = asset_bundle.sell_orders
    ? asset_bundle.sell_orders.map(orderFromJSON)
    : null
  return {
    maker: asset_bundle.maker,
    assets: asset_bundle.assets.map(assetFromJSON),
    assetContract,
    name: asset_bundle.name,
    slug: asset_bundle.slug,
    description: asset_bundle.description,
    externalLink: asset_bundle.external_link,
    permalink: asset_bundle.permalink,
    sellOrders,
  }
}
/**
 * Deserialize an OpenSea API asset-contract payload into an
 * `OpenSeaAssetContract`. Fee fields are coerced to numbers
 * (they are basis points and may arrive as strings).
 */
export const assetContractFromJSON = (asset_contract: any): OpenSeaAssetContract => {
  return {
    name: asset_contract.name,
    description: asset_contract.description,
    type: asset_contract.asset_contract_type,
    schemaName: asset_contract.schema_name,
    address: asset_contract.address,
    tokenSymbol: asset_contract.symbol,
    buyerFeeBasisPoints: Number(asset_contract.buyer_fee_basis_points),
    sellerFeeBasisPoints: Number(asset_contract.seller_fee_basis_points),
    openseaBuyerFeeBasisPoints: Number(asset_contract.opensea_buyer_fee_basis_points),
    openseaSellerFeeBasisPoints: Number(asset_contract.opensea_seller_fee_basis_points),
    devBuyerFeeBasisPoints: Number(asset_contract.dev_buyer_fee_basis_points),
    devSellerFeeBasisPoints: Number(asset_contract.dev_seller_fee_basis_points),
    imageUrl: asset_contract.image_url,
    externalLink: asset_contract.external_link,
    wikiLink: asset_contract.wiki_link,
  }
}
/**
 * Deserialize an OpenSea API collection payload into an `OpenSeaCollection`.
 * The creation date is parsed as UTC; fee fields are coerced to numbers.
 */
export const collectionFromJSON = (collection: any): OpenSeaCollection => {
  const paymentTokens = (collection.payment_tokens || []).map(tokenFromJSON)
  return {
    createdDate: new Date(`${collection.created_date}Z`),
    name: collection.name,
    description: collection.description,
    slug: collection.slug,
    editors: collection.editors,
    hidden: collection.hidden,
    featured: collection.featured,
    featuredImageUrl: collection.featured_image_url,
    displayData: collection.display_data,
    paymentTokens,
    openseaBuyerFeeBasisPoints: Number(collection.opensea_buyer_fee_basis_points),
    openseaSellerFeeBasisPoints: Number(collection.opensea_seller_fee_basis_points),
    devBuyerFeeBasisPoints: Number(collection.dev_buyer_fee_basis_points),
    devSellerFeeBasisPoints: Number(collection.dev_seller_fee_basis_points),
    payoutAddress: collection.payout_address,
    imageUrl: collection.image_url,
    largeImageUrl: collection.large_image_url,
    stats: collection.stats,
    traitStats: collection.traits as OpenSeaTraitStats,
    externalLink: collection.external_url,
    wikiLink: collection.wiki_url,
  }
}
/**
 * Deserialize an OpenSea API fungible-token payload into an
 * `OpenSeaFungibleToken`.
 */
export const tokenFromJSON = (token: any): OpenSeaFungibleToken => ({
  name: token.name,
  symbol: token.symbol,
  decimals: token.decimals,
  address: token.address,
  imageUrl: token.image_url,
  ethPrice: token.eth_price,
  usdPrice: token.usd_price,
})
/**
 * Deserialize an OpenSea API order payload into an `Order`.
 * Numeric fields become BigNumbers; both the account objects and their
 * plain string addresses are kept (the strings conform to the Wyvern
 * order schema). The current price is recomputed client-side at the end.
 */
export const orderFromJSON = (order: any): Order => {
  const createdDate = new Date(`${order.created_date}Z`)
  const fromJSON: Order = {
    hash: order.order_hash || order.hash,
    cancelledOrFinalized: order.cancelled || order.finalized,
    markedInvalid: order.marked_invalid,
    metadata: order.metadata,
    quantity: new BigNumber(order.quantity || 1),
    exchange: order.exchange,
    makerAccount: order.maker,
    takerAccount: order.taker,
    // Use string address to conform to Wyvern Order schema
    maker: order.maker.address,
    taker: order.taker.address,
    makerRelayerFee: new BigNumber(order.maker_relayer_fee),
    takerRelayerFee: new BigNumber(order.taker_relayer_fee),
    makerProtocolFee: new BigNumber(order.maker_protocol_fee),
    takerProtocolFee: new BigNumber(order.taker_protocol_fee),
    makerReferrerFee: new BigNumber(order.maker_referrer_fee || 0),
    waitingForBestCounterOrder: order.fee_recipient.address == NULL_ADDRESS,
    feeMethod: order.fee_method,
    feeRecipientAccount: order.fee_recipient,
    feeRecipient: order.fee_recipient.address,
    side: order.side,
    saleKind: order.sale_kind,
    target: order.target,
    howToCall: order.how_to_call,
    calldata: order.calldata,
    replacementPattern: order.replacement_pattern,
    staticTarget: order.static_target,
    staticExtradata: order.static_extradata,
    paymentToken: order.payment_token,
    basePrice: new BigNumber(order.base_price),
    extra: new BigNumber(order.extra),
    currentBounty: new BigNumber(order.current_bounty || 0),
    currentPrice: new BigNumber(order.current_price || 0),
    createdTime: new BigNumber(Math.round(createdDate.getTime() / 1000)),
    listingTime: new BigNumber(order.listing_time),
    expirationTime: new BigNumber(order.expiration_time),
    salt: new BigNumber(order.salt),
    // Explicit radix: `parseInt` without one may misparse prefixed strings
    v: parseInt(order.v, 10),
    r: order.r,
    s: order.s,
    paymentTokenContract: order.payment_token_contract ? tokenFromJSON(order.payment_token_contract) : undefined,
    asset: order.asset ? assetFromJSON(order.asset) : undefined,
    assetBundle: order.asset_bundle ? assetBundleFromJSON(order.asset_bundle) : undefined,
  }
  // Use client-side price calc, to account for buyer fee (not added by server) and latency
  fromJSON.currentPrice = estimateCurrentPrice(fromJSON)
  return fromJSON
}
/**
 * Serialize an order (hashed or unhashed) into the JSON shape expected by
 * the orderbook API: addresses normalized to lowercase, numeric fields
 * emitted as strings.
 * @param order order (hashed or unhashed)
 */
export const orderToJSON = (order: Order): OrderJSON => {
  // All on-chain addresses are lowercased for a canonical representation
  const lower = (address: string) => address.toLowerCase()
  const asJSON: OrderJSON = {
    exchange: lower(order.exchange),
    maker: lower(order.maker),
    taker: lower(order.taker),
    makerRelayerFee: order.makerRelayerFee.toString(),
    takerRelayerFee: order.takerRelayerFee.toString(),
    makerProtocolFee: order.makerProtocolFee.toString(),
    takerProtocolFee: order.takerProtocolFee.toString(),
    makerReferrerFee: order.makerReferrerFee.toString(),
    feeMethod: order.feeMethod,
    feeRecipient: lower(order.feeRecipient),
    side: order.side,
    saleKind: order.saleKind,
    target: lower(order.target),
    howToCall: order.howToCall,
    calldata: order.calldata,
    replacementPattern: order.replacementPattern,
    staticTarget: lower(order.staticTarget),
    staticExtradata: order.staticExtradata,
    paymentToken: lower(order.paymentToken),
    quantity: order.quantity.toString(),
    basePrice: order.basePrice.toString(),
    // Optional fields stay undefined when absent so they are omitted on serialization
    englishAuctionReservePrice: order.englishAuctionReservePrice
      ? order.englishAuctionReservePrice.toString()
      : undefined,
    extra: order.extra.toString(),
    createdTime: order.createdTime ? order.createdTime.toString() : undefined,
    listingTime: order.listingTime.toString(),
    expirationTime: order.expirationTime.toString(),
    salt: order.salt.toString(),
    metadata: order.metadata,
    v: order.v,
    r: order.r,
    s: order.s,
    hash: order.hash,
  }
  return asJSON
}
/**
 * Sign messages using web3 personal signatures
 * @param web3 Web3 instance
 * @param message message to sign
 * @param signerAddress web3 address signing the message
 * @returns A signature if provider can sign, otherwise null
 */
export async function personalSignAsync(web3: Web3, message: string, signerAddress: string
): Promise<ECSignature> {
  // Build the raw JSON-RPC request for the personal_sign method
  const rpcPayload = {
    method: 'personal_sign',
    params: [message, signerAddress],
    from: signerAddress,
    id: new Date().getTime()
  }
  const signature = await promisify<Web3.JSONRPCResponsePayload>(c =>
    web3.currentProvider.sendAsync(rpcPayload as any, c)
  )
  // Providers report user rejection via an `error` field on the response
  const error = (signature as any).error
  if (error) {
    throw new Error(error)
  }
  return parseSignatureHex(signature.result)
}
/**
 * Checks whether a given address contains any code
 * @param web3 Web3 instance
 * @param address input address
 */
export async function isContractAddress(web3: Web3, address: string
): Promise<boolean> {
  const deployedCode = await promisify<string>(c => web3.eth.getCode(address, c))
  // Externally-owned accounts report the empty-code sentinel '0x'
  return deployedCode !== '0x'
}
/**
 * Special fixes for making BigNumbers using web3 results
 * @param arg An arg or the result of a web3 call to turn into a BigNumber
 */
export function makeBigNumber(arg: number | string | BigNumber): BigNumber {
  // Zero sometimes returned as 0x from contracts
  const normalized = arg === '0x' ? 0 : arg
  // Stringify first: avoids "new BigNumber() number type has more than 15
  // significant digits" for large numeric inputs
  return new BigNumber(normalized.toString())
}
/**
 * Send a transaction to the blockchain and optionally confirm it
 * @param web3 Web3 instance
 * @param param0 __namedParameters
 * @param from address sending transaction
 * @param to destination contract address
 * @param data data to send to contract
 * @param gasPrice gas price to use. If unspecified, uses web3 default (mean gas price)
 * @param value value in ETH to send with data. Defaults to 0
 * @param onError callback when user denies transaction
 */
export async function sendRawTransaction(
  web3: Web3,
  {from, to, data, gasPrice, value = 0, gas}: Web3.TxData,
  onError: (error: Error) => void
): Promise<string> {
  // This gas cannot be increased due to an ethjs error
  const gasLimit = gas == null
    ? await estimateGas(web3, { from, to, data, value })
    : gas
  try {
    const txHashRes = await promisify<string>(c =>
      web3.eth.sendTransaction({ from, to, value, data, gas: gasLimit, gasPrice }, c)
    )
    return txHashRes.toString()
  } catch (error) {
    // Surface the failure to the caller's handler, then rethrow
    onError(error)
    throw error
  }
}
/**
 * Call a method on a contract, sending arbitrary data and
 * handling Parity errors. Returns '0x' if error.
 * @param web3 Web3 instance
 * @param param0 __namedParameters
 * @param from address sending call
 * @param to destination contract address
 * @param data data to send to contract
 * @param onError callback when user denies transaction
 */
export async function rawCall(
  web3: Web3,
  { from, to, data }: Web3.CallData,
  onError?: (error: Error) => void
): Promise<string> {
  try {
    // `return await` keeps rejections inside this try block
    return await promisify<string>(c => web3.eth.call({ from, to, data }, c))
  } catch (error) {
    // Probably method not found, and web3 is a Parity node
    if (onError) {
      onError(error)
    }
    // Backwards compatibility with Geth nodes
    return '0x'
  }
}
/**
 * Estimate Gas usage for a transaction
 * @param web3 Web3 instance
 * @param from address sending transaction
 * @param to destination contract address
 * @param data data to send to contract
 * @param value value in ETH to send with data
 */
export async function estimateGas(
  web3: Web3,
  {from, to, data, value = 0 }: Web3.TxData
): Promise<number> {
  return promisify<number>(c => web3.eth.estimateGas({ from, to, value, data }, c))
}
/**
 * Get mean gas price for sending a txn, in wei
 * @param web3 Web3 instance
 */
export async function getCurrentGasPrice(web3: Web3): Promise<BigNumber> {
  return promisify<BigNumber>(c => web3.eth.getGasPrice(c))
}
/**
 * Get current transfer fees for an asset
 *
 * Only Enjin (ERC-1155) assets carry transfer fees here; for any other
 * token address both returned values are left undefined.
 * @param web3 Web3 instance
 * @param asset The asset to check for transfer fees
 */
export async function getTransferFeeSettings(
  web3: Web3,
  { asset, accountAddress }: {
    asset: Asset;
    accountAddress?: string;
  }
) {
  let transferFee: BigNumber | undefined
  let transferFeeTokenAddress: string | undefined
  if (asset.tokenAddress.toLowerCase() == ENJIN_ADDRESS.toLowerCase()) {
    // Enjin asset
    const feeContract = web3.eth.contract(ERC1155 as any).at(asset.tokenAddress)
    const params = await promisifyCall<any[]>(c => feeContract.transferSettings(
      asset.tokenId,
      { from: accountAddress },
      c)
    )
    if (params) {
      // NOTE(review): indices follow Enjin's `transferSettings` return tuple —
      // params[3] is read as the fee amount and params[2] == 0 appears to mean
      // the fee is denominated in ENJ. Confirm against the deployed ERC-1155 ABI.
      transferFee = makeBigNumber(params[3])
      if (params[2] == 0) {
        transferFeeTokenAddress = ENJIN_COIN_ADDRESS
      }
    }
  }
  return { transferFee, transferFeeTokenAddress }
}
// sourced from 0x.js:
// https://github.com/ProjectWyvern/wyvern-js/blob/39999cb93ce5d80ea90b4382182d1bd4339a9c6c/src/utils/signature_utils.ts
/**
 * Parse a hex-encoded ECDSA signature into its { v, r, s } components,
 * accepting both r+s+v and v+r+s byte orders.
 * @throws Error when neither interpretation yields a valid v (27 or 28)
 */
function parseSignatureHex(signature: string): ECSignature {
  // HACK: There is no consensus on whether the signatureHex string should be formatted as
  // v + r + s OR r + s + v, and different clients (even different versions of the same client)
  // return the signature params in different orders. In order to support all client implementations,
  // we parse the signature in both ways, and evaluate if either one is a valid signature.
  const validVParamValues = [27, 28]
  // Modern clients: r (32 bytes) + s (32 bytes) + v (1 byte)
  const ecSignatureRSV = _parseSignatureHexAsRSV(signature)
  if (_.includes(validVParamValues, ecSignatureRSV.v)) {
    return ecSignatureRSV
  }
  // For older clients
  const ecSignatureVRS = _parseSignatureHexAsVRS(signature)
  if (_.includes(validVParamValues, ecSignatureVRS.v)) {
    return ecSignatureVRS
  }
  throw new Error('Invalid signature')
  // Interprets the buffer as v (1 byte) | r (32 bytes) | s (32 bytes)
  function _parseSignatureHexAsVRS(signatureHex: string) {
    const signatureBuffer: any = ethUtil.toBuffer(signatureHex)
    let v = signatureBuffer[0]
    // Some clients emit the raw recovery id (0/1) instead of 27/28
    if (v < 27) {
      v += 27
    }
    const r = signatureBuffer.slice(1, 33)
    const s = signatureBuffer.slice(33, 65)
    const ecSignature = {
      v,
      r: ethUtil.bufferToHex(r),
      s: ethUtil.bufferToHex(s),
    }
    return ecSignature
  }
  // Delegates r|s|v decoding (including the 27/28 adjustment) to ethereumjs-util
  function _parseSignatureHexAsRSV(signatureHex: string) {
    const { v, r, s } = ethUtil.fromRpcSig(signatureHex)
    const ecSignature = {
      v,
      r: ethUtil.bufferToHex(r),
      s: ethUtil.bufferToHex(s),
    }
    return ecSignature
  }
}
/**
 * Estimates the price of an order
 * @param order The order to estimate price on
 * @param secondsToBacktrack The number of seconds to subtract on current time,
 *  to fix race conditions
 * @param shouldRoundUp Whether to round up fractional wei
 */
export function estimateCurrentPrice(order: Order, secondsToBacktrack = 30, shouldRoundUp = true) {
  let { basePrice, listingTime, expirationTime, extra } = order
  const { side, takerRelayerFee, saleKind } = order
  // Backtrack "now" a little so a price computed slightly ahead of the
  // node's clock isn't rejected on-chain
  const now = new BigNumber(Math.round(Date.now() / 1000)).minus(secondsToBacktrack)
  // Re-wrap the fields: deserialized orders may carry strings or plain numbers
  basePrice = new BigNumber(basePrice)
  listingTime = new BigNumber(listingTime)
  expirationTime = new BigNumber(expirationTime)
  extra = new BigNumber(extra)
  let exactPrice = basePrice
  if (saleKind === SaleKind.FixedPrice) {
    // Do nothing, price is correct
  } else if (saleKind === SaleKind.DutchAuction) {
    // Linear interpolation of `extra` over the listing->expiration window
    const diff = extra.times(now.minus(listingTime))
      .dividedBy(expirationTime.minus(listingTime))
    exactPrice = side == OrderSide.Sell
      /* Sell-side - start price: basePrice. End price: basePrice - extra. */
      ? basePrice.minus(diff)
      /* Buy-side - start price: basePrice. End price: basePrice + extra. */
      : basePrice.plus(diff)
  }
  // Add taker fee only for buyers (a buyer is the taker of a sell-side
  // order, so the taker relayer fee is added on top of the sale price)
  if (side === OrderSide.Sell && !order.waitingForBestCounterOrder) {
    // Buyer fee increases sale price
    exactPrice = exactPrice.times(+takerRelayerFee / INVERSE_BASIS_POINT + 1)
  }
  // Round up so a fractional-wei estimate is never below the settled price
  return shouldRoundUp ? exactPrice.ceil() : exactPrice
}
/**
 * Get the Wyvern representation of a fungible asset
 * @param schema The WyvernSchema needed to access this asset
 * @param asset The asset to trade
 * @param quantity The number of items to trade
 */
export function getWyvernAsset(
  schema: Schema<WyvernAsset>,
  asset: Asset,
  quantity = new BigNumber(1)
): WyvernAsset {
  // Token id is optional (fungible assets may not have one)
  const tokenId = asset.tokenId == null ? undefined : asset.tokenId.toString()
  return schema.assetFromFields({
    'ID': tokenId,
    'Quantity': quantity.toString(),
    'Address': asset.tokenAddress.toLowerCase(),
    'Name': asset.name
  })
}
/**
 * Get the Wyvern representation of a group of assets
 * Sort order is enforced here. Throws if there's a duplicate.
 * @param assets Assets to bundle
 * @param schemas The WyvernSchemas needed to access each asset, respectively
 * @param quantities The quantity of each asset to bundle, respectively
 * @throws Error when the three input arrays differ in length, or the
 *  bundle contains the same (address, id) pair twice
 */
export function getWyvernBundle(
  assets: Asset[],
  schemas: Array<Schema<WyvernAsset>>,
  quantities: BigNumber[]
): WyvernBundle {
  if (assets.length != quantities.length) {
    throw new Error("Bundle must have a quantity for every asset")
  }
  if (assets.length != schemas.length) {
    throw new Error("Bundle must have a schema for every asset")
  }
  const wyAssets = assets.map((asset, i) => getWyvernAsset(schemas[i], asset, quantities[i]))
  // Canonical ordering: by contract address, then token id (0 when the asset has no id)
  const sorters = [
    (assetAndSchema: { asset: WyvernAsset, schema: WyvernSchemaName }) => assetAndSchema.asset.address,
    (assetAndSchema: { asset: WyvernAsset, schema: WyvernSchemaName }) => assetAndSchema.asset.id || 0
  ]
  // Pair each asset with its schema name so sorting keeps them aligned
  const wyAssetsAndSchemas = wyAssets.map((asset, i) => ({
    asset,
    schema: schemas[i].name as WyvernSchemaName
  }))
  // Dedupe on the "(address)-(id)" key; any removal means a duplicate asset
  const uniqueAssets = _.uniqBy(wyAssetsAndSchemas, group => `${sorters[0](group)}-${sorters[1](group)}`)
  if (uniqueAssets.length != wyAssetsAndSchemas.length) {
    throw new Error("Bundle can't contain duplicate assets")
  }
  const sortedWyAssetsAndSchemas = _.sortBy(wyAssetsAndSchemas, sorters)
  return {
    assets: sortedWyAssetsAndSchemas.map(group => group.asset),
    schemas: sortedWyAssetsAndSchemas.map(group => group.schema),
  }
}
/**
 * Get the non-prefixed hash for the order
 * (Fixes a Wyvern typescript issue and casing issue)
 * @param order order to hash
 */
export function getOrderHash(order: UnhashedOrder) {
  // Wyvern's hasher expects lowercase addresses and string-typed enum fields
  const normalized = {
    ...order,
    maker: order.maker.toLowerCase(),
    taker: order.taker.toLowerCase(),
    feeRecipient: order.feeRecipient.toLowerCase(),
    side: String(order.side),
    saleKind: String(order.saleKind),
    howToCall: String(order.howToCall),
    feeMethod: String(order.feeMethod)
  }
  return WyvernProtocol.getOrderHashHex(normalized as any)
}
/**
 * Assign an order and a new matching order to their buy/sell sides
 * @param order Original order
 * @param matchingOrder The result of _makeMatchingOrder
 */
export function assignOrdersToSides(order: Order, matchingOrder: UnsignedOrder ): { buy: Order; sell: Order } {
  // The original order carries the signature; copy it onto the unsigned
  // matching order so both sides are complete.
  const counterpart: Order = {
    ...matchingOrder,
    v: order.v,
    r: order.r,
    s: order.s
  }
  return order.side == OrderSide.Sell
    ? { buy: counterpart, sell: order }
    : { buy: order, sell: counterpart }
}
// BROKEN
// TODO fix this calldata for buy orders
/**
 * Dry-run an order's settlement call through the seller's proxy contract.
 * Returns false when the seller has no registered proxy; otherwise returns
 * whether the proxied call would succeed.
 * NOTE(review): broken per the TODO above — the calldata splice below
 * assumes a fixed byte layout and "doesn't always work".
 */
async function canSettleOrder(client: OpenSeaPort, order: Order, matchingOrder: Order): Promise<boolean> {
  // HACK that doesn't always work
  // to change null address to 0x1111111... for replacing calldata
  const calldata = order.calldata.slice(0, 98) + "1111111111111111111111111111111111111111" + order.calldata.slice(138)
  // For buy-side orders the seller is the maker of the matching order
  const seller = order.side == OrderSide.Buy ? matchingOrder.maker : order.maker
  const proxy = await client._getProxy(seller)
  if (!proxy) {
    console.warn(`No proxy found for seller ${seller}`)
    return false
  }
  const contract = (client.web3.eth.contract([proxyABI])).at(proxy)
  // Read-only simulation via eth_call; does not submit a transaction
  return promisify<boolean>(c =>
    contract.proxy.call(
      order.target,
      order.howToCall,
      calldata,
      {from: seller},
      c)
  )
}
/**
 * Delay using setTimeout
 * @param ms milliseconds to wait
 */
export async function delay(ms: number) {
  return new Promise(resolve => {
    setTimeout(resolve, ms)
  })
}
/**
 * Validates that an address exists, isn't null, and is properly
 * formatted for Wyvern and OpenSea
 * @param address input address
 * @throws Error when the address is empty, malformed, or the null address
 */
export function validateAndFormatWalletAddress(web3: Web3, address: string): string {
  // Guard order matters: missing input is reported before format problems
  if (!address) throw new Error('No wallet address found')
  if (!web3.isAddress(address)) throw new Error('Invalid wallet address')
  if (address == NULL_ADDRESS) throw new Error('Wallet cannot be the null address')
  // Lowercase for a canonical representation
  return address.toLowerCase()
}
/**
 * Notify developer when a pattern will be deprecated
 * @param msg message to log to console
 */
export function onDeprecated(msg: string) {
  console.warn('DEPRECATION NOTICE: ' + msg)
}
/**
* Get special-case approval addresses for an erc721 contract
* @param erc721Contract contract to check
*/
export async function getNonCompliantApprovalAddress(erc721Contract: Web3.ContractInstance, tokenId: string, accountAddress: string): Promise<string | undefined> {
const results = await Promise.all([
// CRYPTOKITTIES check
promisifyCall<string>(c => erc721Contract.kittyIndexToApproved.call(tokenId, c)),
// Etherbots check
promisifyCall<string>(c => erc721Contract.partIndexToApproved.call(tokenId, c)),
])
return _.compact(results)[0]
} | the_stack |
import * as ec2 from '@aws-cdk/aws-ec2';
import * as ssm from '@aws-cdk/aws-ssm';
// v2 - keep this import as a separate section to reduce merge conflict when forward merging with the v2 branch.
// eslint-disable-next-line
import { Construct as CoreConstruct } from '@aws-cdk/core';
/**
 * The ECS-optimized AMI variant to use. For more information, see
 * [Amazon ECS-optimized AMIs](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/ecs-optimized_AMI.html).
 *
 * The chosen variant selects the `gpu/` or `arm64/` segment of the SSM
 * parameter path used to look up the recommended image.
 */
export enum AmiHardwareType {
  /**
   * Use the standard Amazon ECS-optimized AMI.
   */
  STANDARD = 'Standard',
  /**
   * Use the Amazon ECS GPU-optimized AMI.
   */
  GPU = 'GPU',
  /**
   * Use the Amazon ECS-optimized Amazon Linux 2 (arm64) AMI.
   */
  ARM = 'ARM64',
}
/**
 * ECS-optimized Windows version list
 */
export enum WindowsOptimizedVersion {
  /** Windows Server 2019 */
  SERVER_2019 = '2019',
  /** Windows Server 2016 */
  SERVER_2016 = '2016',
}
/*
* TODO:v2.0.0
* * remove `export` keyword
* * remove @deprecated
*/
/**
 * The properties that define which ECS-optimized AMI is used.
 *
 * `generation` and `windowsVersion` are mutually exclusive; setting both
 * causes the consuming construct to throw.
 *
 * @deprecated see {@link EcsOptimizedImage}
 */
export interface EcsOptimizedAmiProps {
  /**
   * The Amazon Linux generation to use.
   *
   * @default AmazonLinuxGeneration.AmazonLinux2
   */
  readonly generation?: ec2.AmazonLinuxGeneration;
  /**
   * The Windows Server version to use.
   *
   * @default none, uses Linux generation
   */
  readonly windowsVersion?: WindowsOptimizedVersion;
  /**
   * The ECS-optimized AMI variant to use.
   *
   * @default AmiHardwareType.Standard
   */
  readonly hardwareType?: AmiHardwareType;
  /**
   * Whether the AMI ID is cached to be stable between deployments
   *
   * By default, the newest image is used on each deployment. This will cause
   * instances to be replaced whenever a new version is released, and may cause
   * downtime if there aren't enough running instances in the AutoScalingGroup
   * to reschedule the tasks on.
   *
   * If set to true, the AMI ID will be cached in `cdk.context.json` and the
   * same value will be used on future runs. Your instances will not be replaced
   * but your AMI version will grow old over time. To refresh the AMI lookup,
   * you will have to evict the value from the cache using the `cdk context`
   * command. See https://docs.aws.amazon.com/cdk/latest/guide/context.html for
   * more information.
   *
   * Can not be set to `true` in environment-agnostic stacks.
   *
   * @default false
   */
  readonly cachedInContext?: boolean;
}
/*
* TODO:v2.0.0 remove EcsOptimizedAmi
*/
/**
 * Construct a Linux or Windows machine image from the latest ECS Optimized AMI published in SSM
 *
 * @deprecated see {@link EcsOptimizedImage#amazonLinux}, {@link EcsOptimizedImage#amazonLinux} and {@link EcsOptimizedImage#windows}
 */
export class EcsOptimizedAmi implements ec2.IMachineImage {
  // Mutually exclusive OS selectors: exactly one of generation/windowsVersion
  // ends up set by the constructor
  private readonly generation?: ec2.AmazonLinuxGeneration;
  private readonly windowsVersion?: WindowsOptimizedVersion;
  private readonly hwType: AmiHardwareType;
  // Full SSM parameter path of the recommended AMI, derived below
  private readonly amiParameterName: string;
  private readonly cachedInContext: boolean;
  /**
   * Constructs a new instance of the EcsOptimizedAmi class.
   */
  constructor(props?: EcsOptimizedAmiProps) {
    this.hwType = (props && props.hardwareType) || AmiHardwareType.STANDARD;
    if (props && props.generation) { // generation defined in the props object
      if (props.generation === ec2.AmazonLinuxGeneration.AMAZON_LINUX && this.hwType !== AmiHardwareType.STANDARD) {
        throw new Error('Amazon Linux does not support special hardware type. Use Amazon Linux 2 instead');
      } else if (props.windowsVersion) {
        throw new Error('"windowsVersion" and Linux image "generation" cannot be both set');
      } else {
        this.generation = props.generation;
      }
    } else if (props && props.windowsVersion) {
      if (this.hwType !== AmiHardwareType.STANDARD) {
        throw new Error('Windows Server does not support special hardware type');
      } else {
        this.windowsVersion = props.windowsVersion;
      }
    } else { // generation not defined in props object
      // always default to Amazon Linux v2 regardless of HW
      this.generation = ec2.AmazonLinuxGeneration.AMAZON_LINUX_2;
    }
    // set the SSM parameter name; each segment is only present for the
    // matching OS / hardware combination
    this.amiParameterName = '/aws/service/ecs/optimized-ami/'
      + (this.generation === ec2.AmazonLinuxGeneration.AMAZON_LINUX ? 'amazon-linux/' : '')
      + (this.generation === ec2.AmazonLinuxGeneration.AMAZON_LINUX_2 ? 'amazon-linux-2/' : '')
      + (this.windowsVersion ? `windows_server/${this.windowsVersion}/english/full/` : '')
      + (this.hwType === AmiHardwareType.GPU ? 'gpu/' : '')
      + (this.hwType === AmiHardwareType.ARM ? 'arm64/' : '')
      + 'recommended/image_id';
    this.cachedInContext = props?.cachedInContext ?? false;
  }
  /**
   * Return the correct image
   */
  public getImage(scope: CoreConstruct): ec2.MachineImageConfig {
    const ami = lookupImage(scope, this.cachedInContext, this.amiParameterName);
    const osType = this.windowsVersion ? ec2.OperatingSystemType.WINDOWS : ec2.OperatingSystemType.LINUX;
    return {
      imageId: ami,
      osType,
      userData: ec2.UserData.forOperatingSystem(osType),
    };
  }
}
/**
 * Additional configuration properties for EcsOptimizedImage factory functions
 * (`amazonLinux`, `amazonLinux2` and `windows`).
 */
export interface EcsOptimizedImageOptions {
  /**
   * Whether the AMI ID is cached to be stable between deployments
   *
   * By default, the newest image is used on each deployment. This will cause
   * instances to be replaced whenever a new version is released, and may cause
   * downtime if there aren't enough running instances in the AutoScalingGroup
   * to reschedule the tasks on.
   *
   * If set to true, the AMI ID will be cached in `cdk.context.json` and the
   * same value will be used on future runs. Your instances will not be replaced
   * but your AMI version will grow old over time. To refresh the AMI lookup,
   * you will have to evict the value from the cache using the `cdk context`
   * command. See https://docs.aws.amazon.com/cdk/latest/guide/context.html for
   * more information.
   *
   * Can not be set to `true` in environment-agnostic stacks.
   *
   * @default false
   */
  readonly cachedInContext?: boolean;
}
/**
 * Construct a Linux or Windows machine image from the latest ECS Optimized AMI published in SSM
 *
 * Instances are only created through the static factory methods, which
 * guarantee that exactly one of `generation` / `windowsVersion` is set.
 */
export class EcsOptimizedImage implements ec2.IMachineImage {
  /**
   * Construct an Amazon Linux 2 image from the latest ECS Optimized AMI published in SSM
   *
   * @param hardwareType ECS-optimized AMI variant to use
   */
  public static amazonLinux2(hardwareType = AmiHardwareType.STANDARD, options: EcsOptimizedImageOptions = {}): EcsOptimizedImage {
    return new EcsOptimizedImage({
      generation: ec2.AmazonLinuxGeneration.AMAZON_LINUX_2,
      hardwareType,
      cachedInContext: options.cachedInContext,
    });
  }
  /**
   * Construct an Amazon Linux AMI image from the latest ECS Optimized AMI published in SSM
   */
  public static amazonLinux(options: EcsOptimizedImageOptions = {}): EcsOptimizedImage {
    return new EcsOptimizedImage({
      generation: ec2.AmazonLinuxGeneration.AMAZON_LINUX,
      cachedInContext: options.cachedInContext,
    });
  }
  /**
   * Construct a Windows image from the latest ECS Optimized AMI published in SSM
   *
   * @param windowsVersion Windows Version to use
   */
  public static windows(windowsVersion: WindowsOptimizedVersion, options: EcsOptimizedImageOptions = {}): EcsOptimizedImage {
    return new EcsOptimizedImage({
      windowsVersion,
      cachedInContext: options.cachedInContext,
    });
  }
  // Exactly one of generation/windowsVersion is set (enforced in the constructor)
  private readonly generation?: ec2.AmazonLinuxGeneration;
  private readonly windowsVersion?: WindowsOptimizedVersion;
  private readonly hwType?: AmiHardwareType;
  // Full SSM parameter path of the recommended AMI, derived below
  private readonly amiParameterName: string;
  private readonly cachedInContext: boolean;
  /**
   * Constructs a new instance of the EcsOptimizedAmi class.
   */
  private constructor(props: EcsOptimizedAmiProps) {
    this.hwType = props && props.hardwareType;
    if (props.windowsVersion) {
      this.windowsVersion = props.windowsVersion;
    } else if (props.generation) {
      this.generation = props.generation;
    } else {
      // Unreachable via the static factories, which always set one of the two
      throw new Error('This error should never be thrown');
    }
    // set the SSM parameter name; each segment is only present for the
    // matching OS / hardware combination
    this.amiParameterName = '/aws/service/ecs/optimized-ami/'
      + (this.generation === ec2.AmazonLinuxGeneration.AMAZON_LINUX ? 'amazon-linux/' : '')
      + (this.generation === ec2.AmazonLinuxGeneration.AMAZON_LINUX_2 ? 'amazon-linux-2/' : '')
      + (this.windowsVersion ? `windows_server/${this.windowsVersion}/english/full/` : '')
      + (this.hwType === AmiHardwareType.GPU ? 'gpu/' : '')
      + (this.hwType === AmiHardwareType.ARM ? 'arm64/' : '')
      + 'recommended/image_id';
    this.cachedInContext = props?.cachedInContext ?? false;
  }
  /**
   * Return the correct image
   */
  public getImage(scope: CoreConstruct): ec2.MachineImageConfig {
    const ami = lookupImage(scope, this.cachedInContext, this.amiParameterName);
    const osType = this.windowsVersion ? ec2.OperatingSystemType.WINDOWS : ec2.OperatingSystemType.LINUX;
    return {
      imageId: ami,
      osType,
      userData: ec2.UserData.forOperatingSystem(osType),
    };
  }
}
/**
 * Amazon ECS variant
 *
 * The value becomes a segment of the SSM parameter path used by
 * {@link BottleRocketImage} to look up the AMI.
 */
export enum BottlerocketEcsVariant {
  /**
   * aws-ecs-1 variant
   */
  AWS_ECS_1 = 'aws-ecs-1'
}
/**
 * Properties for BottleRocketImage, which looks up the latest
 * Bottlerocket AMI from SSM.
 */
export interface BottleRocketImageProps {
  /**
   * The Amazon ECS variant to use.
   * Only `aws-ecs-1` is currently available
   *
   * @default - BottlerocketEcsVariant.AWS_ECS_1
   */
  readonly variant?: BottlerocketEcsVariant;
  /**
   * The CPU architecture
   *
   * @default - x86_64
   */
  readonly architecture?: ec2.InstanceArchitecture;
  /**
   * Whether the AMI ID is cached to be stable between deployments
   *
   * By default, the newest image is used on each deployment. This will cause
   * instances to be replaced whenever a new version is released, and may cause
   * downtime if there aren't enough running instances in the AutoScalingGroup
   * to reschedule the tasks on.
   *
   * If set to true, the AMI ID will be cached in `cdk.context.json` and the
   * same value will be used on future runs. Your instances will not be replaced
   * but your AMI version will grow old over time. To refresh the AMI lookup,
   * you will have to evict the value from the cache using the `cdk context`
   * command. See https://docs.aws.amazon.com/cdk/latest/guide/context.html for
   * more information.
   *
   * Can not be set to `true` in environment-agnostic stacks.
   *
   * @default false
   */
  readonly cachedInContext?: boolean;
}
/**
 * Construct an Bottlerocket image from the latest AMI published in SSM
 */
export class BottleRocketImage implements ec2.IMachineImage {
  // Full SSM parameter path of the latest Bottlerocket AMI
  private readonly amiParameterName: string;
  /**
   * Amazon ECS variant for Bottlerocket AMI
   */
  private readonly variant: string;
  /**
   * Instance architecture
   */
  private readonly architecture: ec2.InstanceArchitecture;
  private readonly cachedInContext: boolean;
  /**
   * Constructs a new instance of the BottleRocketImage class.
   */
  public constructor(props: BottleRocketImageProps = {}) {
    this.variant = props.variant ?? BottlerocketEcsVariant.AWS_ECS_1;
    this.architecture = props.architecture ?? ec2.InstanceArchitecture.X86_64;
    this.cachedInContext = props.cachedInContext ?? false;
    // Public SSM parameter under which AWS publishes the latest Bottlerocket AMI
    this.amiParameterName = `/aws/service/bottlerocket/${this.variant}/${this.architecture}/latest/image_id`;
  }
  /**
   * Return the correct image
   */
  public getImage(scope: CoreConstruct): ec2.MachineImageConfig {
    return {
      imageId: lookupImage(scope, this.cachedInContext, this.amiParameterName),
      osType: ec2.OperatingSystemType.LINUX,
      // Start with empty user data; callers can add their own
      userData: ec2.UserData.custom(''),
    };
  }
}
function lookupImage(scope: CoreConstruct, cachedInContext: boolean | undefined, parameterName: string) {
return cachedInContext
? ssm.StringParameter.valueFromLookup(scope, parameterName)
: ssm.StringParameter.valueForTypedStringParameter(scope, parameterName, ssm.ParameterType.AWS_EC2_IMAGE_ID);
} | the_stack |
import * as assert from 'assert'
import * as vscode from 'vscode'
import { createBackButton } from '../../../shared/ui/buttons'
import {
createLabelQuickPick,
createQuickPick,
FilterBoxQuickPickPrompter,
DataQuickPick,
DataQuickPickItem,
DEFAULT_QUICKPICK_OPTIONS,
QuickPickPrompter,
CUSTOM_USER_INPUT,
} from '../../../shared/ui/pickerPrompter'
import { WIZARD_BACK } from '../../../shared/wizards/wizard'
import { exposeEmitters, ExposeEmitters } from '../vscode/testUtils'
import { recentlyUsed } from '../../../shared/localizedText'
// Tests for the createQuickPick factory: default option application,
// promise-backed item loading, and AsyncIterable item streaming.
describe('createQuickPick', function () {
  const items: DataQuickPickItem<string>[] = [
    { label: 'item1', data: 'yes' },
    { label: 'item2', data: 'no' },
  ]
  it('applies default options', async function () {
    const prompter = createQuickPick([])
    const picker = prompter.quickPick
    // Every picker key that has a registered default must carry that default
    Object.keys(picker).forEach(key => {
      const defaultValue = (DEFAULT_QUICKPICK_OPTIONS as Record<string, any>)[key]
      if (defaultValue !== undefined) {
        assert.strictEqual(picker[key as keyof vscode.QuickPick<any>], defaultValue)
      }
    })
  })
  it('creates a new prompter with options', async function () {
    const prompter = createQuickPick(items, { title: 'test' })
    assert.strictEqual(prompter.quickPick.title, 'test')
  })
  it('creates a new prompter when given a promise for items', async function () {
    let resolveItems!: (items: DataQuickPickItem<string>[]) => void
    const itemsPromise = new Promise<DataQuickPickItem<string>[]>(resolve => (resolveItems = resolve))
    const prompter = createQuickPick(itemsPromise)
    prompter.prompt()
    // While the items promise is pending the picker is busy and disabled
    assert.strictEqual(prompter.quickPick.busy, true)
    assert.strictEqual(prompter.quickPick.enabled, false)
    resolveItems(items)
    await itemsPromise
    // Once resolved, the picker becomes interactive and shows the items
    assert.strictEqual(prompter.quickPick.busy, false)
    assert.strictEqual(prompter.quickPick.enabled, true)
    assert.deepStrictEqual(prompter.quickPick.items, items)
  })
  it('creates a new prompter when given an AsyncIterable', async function () {
    // Two gate promises let the test control when each item is yielded
    let r1!: (v?: any) => void
    let r2!: (v?: any) => void
    const p1 = new Promise(r => (r1 = r))
    const p2 = new Promise(r => (r2 = r))
    async function* generator() {
      for (const item of items) {
        if (item === items[0]) {
          await p1
        } else {
          await p2
        }
        yield [item]
      }
    }
    const prompter = createQuickPick(generator())
    r1()
    // Yield to the event loop so the picker can consume the first batch
    await new Promise(r => setImmediate(r))
    assert.deepStrictEqual(prompter.quickPick.items, [items[0]])
    // Still busy: the iterable has not finished
    assert.strictEqual(prompter.quickPick.busy, true)
    r2()
    await new Promise(r => setImmediate(r))
    assert.deepStrictEqual(prompter.quickPick.items, items)
    assert.strictEqual(prompter.quickPick.busy, false)
  })
})
describe('createLabelQuickPick', function () {
    it('creates a new prompter using just labels', async function () {
        const labelItems = [{ label: 'name1' }, { label: 'name2' }]
        const prompter = createLabelQuickPick(labelItems)
        // Label-only items get their label mirrored into `data` so the prompt
        // resolves to the label string itself.
        assert.deepStrictEqual(
            prompter.quickPick.items,
            labelItems.map(item => ({ label: item.label, data: item.label }))
        )
    })

    it('can use promises', async function () {
        const labelItems = [{ label: 'name1' }, { label: 'name2' }]
        const itemsPromise = Promise.resolve(labelItems)
        const prompter = createLabelQuickPick(itemsPromise)

        // A pending items promise puts the picker into the busy state.
        assert.strictEqual(prompter.quickPick.busy, true)
    })
})
describe('QuickPickPrompter', function () {
    const testItems = [
        { label: 'item1', data: 0 },
        { label: 'item2', data: 1 },
        { label: 'item3', data: 2 },
    ]
    let picker: ExposeEmitters<DataQuickPick<number>, 'onDidChangeValue' | 'onDidTriggerButton' | 'onDidHide'>
    let testPrompter: QuickPickPrompter<number>

    beforeEach(function () {
        // Wrap the real QuickPick so tests can fire its events directly.
        picker = exposeEmitters(vscode.window.createQuickPick(), [
            'onDidChangeValue',
            'onDidTriggerButton',
            'onDidHide',
        ])
        picker.items = testItems
        testPrompter = new QuickPickPrompter(picker)
    })

    it('can select an item', async function () {
        testPrompter.onDidShow(() => (picker.selectedItems = [testItems[0]]))
        const result = testPrompter.prompt()
        assert.strictEqual(await result, testItems[0].data)
    })

    it('steps can be set', function () {
        testPrompter.setSteps(1, 2)
        assert.strictEqual(picker.step, 1)
        assert.strictEqual(picker.totalSteps, 2)
    })

    it('can handle back button', async function () {
        testPrompter.onDidShow(() => picker.fireOnDidTriggerButton(createBackButton()))
        assert.strictEqual(await testPrompter.prompt(), WIZARD_BACK)
    })

    it('can accept input from buttons', async function () {
        // A button whose onClick returns a value resolves the prompt with it.
        const testButton = { iconPath: vscode.Uri.parse(''), onClick: () => 5 }
        testPrompter.onDidShow(() => picker.fireOnDidTriggerButton(testButton))
        assert.strictEqual(await testPrompter.prompt(), 5)
    })

    it('can selectively enable input when loading', async function () {
        // `false` for the second argument keeps the picker enabled while loading.
        const p = testPrompter.loadItems(new Promise(r => setImmediate(() => r([]))), false)
        assert.strictEqual(testPrompter.quickPick.enabled, true)
        await p
        assert.strictEqual(testPrompter.quickPick.enabled, true)
    })

    it('does not close if button does not return anything', async function () {
        const testButton = { iconPath: vscode.Uri.parse(''), onClick: () => {} }
        testPrompter.onDidShow(() => {
            picker.fireOnDidTriggerButton(testButton)
            picker.selectedItems = [testItems[0]]
        })
        // The prompt still resolves through the normal selection, not the button.
        assert.strictEqual(await testPrompter.prompt(), testItems[0].data)
    })

    it('returns recent item', async function () {
        testPrompter.onDidShow(() => (picker.selectedItems = [testItems[1]]))
        const result = testPrompter.prompt()
        assert.strictEqual(await result, testItems[1].data)
        assert.strictEqual(testPrompter.recentItem, testItems[1])
    })

    it('can set recent item', async function () {
        testPrompter.recentItem = testItems[2]
        assert.deepStrictEqual(picker.activeItems, [testItems[2]])
        // setRecentItem() puts the item at the top of the list. #2148
        assert.deepStrictEqual(picker.items[0], picker.activeItems[0])
    })

    it('tries to recover recent item from partial data', async function () {
        // Assigning raw `data` (not a full item) should still locate the match.
        testPrompter.recentItem = 2
        assert.deepStrictEqual(picker.activeItems, [testItems[2]])
    })

    it('shows first item if recent item does not exist', async function () {
        testPrompter.recentItem = { label: 'item4', data: 3 }
        assert.deepStrictEqual(picker.activeItems, [testItems[0]])
    })

    it('adds a message to the description when an item has been previously selected', async function () {
        testPrompter = new QuickPickPrompter(picker, { recentItemText: true })
        testPrompter.recentItem = { label: 'item1', data: 0 }
        const description = ` (${recentlyUsed})`
        assert.deepStrictEqual(picker.activeItems, [{ ...testItems[0], description }])
    })

    it('shows a `noItemsFound` item if no items are loaded', async function () {
        const noItemsFoundItem = { label: 'placeholder', data: 0 }
        testPrompter = new QuickPickPrompter(picker, { noItemsFoundItem })
        // Fix: `clearAndLoadItems` is async; the original did not await it, so
        // the assertion could run before the placeholder item was applied.
        await testPrompter.clearAndLoadItems([])
        assert.deepStrictEqual(picker.items, [noItemsFoundItem])
    })

    it('does not show a `noItemsFound` item if busy', async function () {
        let resolveItems!: (items: DataQuickPickItem<number>[]) => void
        const itemsPromise = new Promise<DataQuickPickItem<number>[]>(resolve => (resolveItems = resolve))
        const noItemsFoundItem = { label: 'placeholder', data: 0 }

        testPrompter = new QuickPickPrompter(picker, { noItemsFoundItem })
        // Deliberately NOT awaited: the point is to observe the in-flight state.
        testPrompter.clearAndLoadItems(itemsPromise)

        assert.strictEqual(picker.items.length, 0)
        assert.strictEqual(picker.busy, true)
        resolveItems(testItems)
    })

    it('shows an error item if a Promise fails to load things', async function () {
        const badPromise = Promise.reject(new Error('my error'))
        const errorItem = { label: 'error', data: 0 }
        testPrompter = new QuickPickPrompter(picker, { errorItem })
        await testPrompter.clearAndLoadItems(badPromise)
        // The rejection message is surfaced through the item's `detail` field.
        assert.deepStrictEqual(picker.items, [{ detail: 'my error', ...errorItem }])
    })

    it('handles AsyncIterables that return something', async function () {
        // The generator yields two items and *returns* (not yields) a final batch;
        // returned batches must also be appended.
        async function* generator() {
            for (const item of testItems.slice(0, -1)) {
                yield [item]
            }
            return testItems.slice(-1)
        }

        await testPrompter.clearAndLoadItems(generator())
        assert.strictEqual(picker.items.length, 3)
    })

    it('handles AsyncIterables that throw', async function () {
        const errorItem = { label: 'error', data: 0 }
        testPrompter = new QuickPickPrompter(picker, { errorItem })

        async function* generator() {
            for (const item of testItems.slice(0, -1)) {
                yield [item]
            }
            throw new Error('my error')
        }

        // Items yielded before the throw are kept; the error item is appended last.
        await testPrompter.clearAndLoadItems(generator())
        assert.strictEqual(picker.items.length, 3)
        assert.strictEqual(picker.items[picker.items.length - 1].detail, 'my error')
    })

    it('stops requesting from an AsyncIterable when hidden', async function () {
        // A re-armed gate: each yield waits for `unlock` to be called once more.
        let unlock!: () => void
        let lock = new Promise<void>(r => (unlock = r))

        async function* generator() {
            for (const item of testItems) {
                await lock
                yield [item]
                lock = new Promise<void>(r => (unlock = r))
            }
        }

        // Deliberately NOT awaited: the iterable never finishes in this test.
        testPrompter.clearAndLoadItems(generator())
        picker.fireOnDidHide()
        unlock()
        await new Promise(r => setImmediate(r))
        assert.strictEqual(picker.items.length, 1)
        // Further unlocks must not produce more items after the picker was hidden.
        unlock()
        await new Promise(r => setImmediate(r))
        assert.strictEqual(picker.items.length, 1)
    })
})
describe('FilterBoxQuickPickPrompter', function () {
    // Safety net: dispose the picker if a test hangs waiting for events.
    const TEST_TIMEOUT = 5000
    const testItems = [
        { label: 'item1', data: 0 },
        { label: 'item2', data: 1 },
        { label: 'item3', data: 2 },
    ]
    // The filter box accepts free-form text, parsed to a number; non-numeric
    // input is rejected with the 'NaN' validation message.
    const filterBoxInputSettings = {
        label: 'Enter a number',
        transform: (resp: string) => Number.parseInt(resp),
        validator: (resp: string) => (Number.isNaN(Number.parseInt(resp)) ? 'NaN' : undefined),
    }
    let picker: ExposeEmitters<DataQuickPick<number>, 'onDidChangeValue' | 'onDidAccept'>
    let testPrompter: FilterBoxQuickPickPrompter<number>

    function addTimeout(): void {
        setTimeout(picker.dispose.bind(picker), TEST_TIMEOUT)
    }

    // Loads the fixture items and then shows the prompt.
    function loadAndPrompt(): ReturnType<typeof testPrompter.prompt> {
        return testPrompter.loadItems(testItems).then(() => testPrompter.prompt())
    }

    beforeEach(function () {
        // VSC 1.42 does not fire value-change events the way these tests need
        // (see the note below), so skip the whole suite there.
        if (vscode.version.startsWith('1.42')) {
            this.skip()
        }

        picker = exposeEmitters(vscode.window.createQuickPick(), ['onDidChangeValue', 'onDidAccept'])
        testPrompter = new FilterBoxQuickPickPrompter(picker, filterBoxInputSettings)
        addTimeout()
    })

    it('adds a new item based off the filter box', async function () {
        const input = '123'

        // The synthesized custom-input item carries the typed text in its
        // description; select it as soon as it becomes active.
        picker.onDidChangeActive(items => {
            if (items[0]?.description !== undefined) {
                picker.selectedItems = [items[0]]
            }
        })

        testPrompter.onDidShow(() => {
            // Note: VSC 1.42 will _not_ fire the change value event when setting `picker.value`
            picker.value = input
            picker.fireOnDidChangeValue(input)
        })

        assert.strictEqual(await loadAndPrompt(), Number(input))
    })

    it('can handle additional items being added', async function () {
        const input = '456'

        picker.onDidChangeActive(items => {
            if (items[0]?.description !== undefined) {
                picker.selectedItems = [items[0]]
            }
        })

        testPrompter.onDidShow(async () => {
            picker.value = input
            picker.fireOnDidChangeValue(input)

            // Loading more items (both eagerly and via a promise) after typing
            // must not clobber the custom-input item.
            const newItems = [{ label: 'item4', data: 3 }]
            const newItemsPromise = Promise.resolve(newItems)

            await testPrompter.loadItems(newItems)
            await testPrompter.loadItems(newItemsPromise)
        })

        assert.strictEqual(await loadAndPrompt(), Number(input))
    })

    it('can accept custom input as a last response', async function () {
        const input = '123'

        testPrompter.onDidShow(() => {
            picker.onDidChangeActive(active => {
                if (active[0]?.description !== undefined) {
                    picker.selectedItems = [active[0]]
                    picker.fireOnDidAccept()
                }
            })

            // Seeding `recentItem` with the CUSTOM_USER_INPUT sentinel should
            // restore the previously-typed text as the active custom item.
            testPrompter.recentItem = { data: CUSTOM_USER_INPUT, description: input } as any
            picker.fireOnDidChangeValue(input)
        })

        assert.strictEqual(await loadAndPrompt(), Number(input))
    })

    it('validates the custom input', async function () {
        const input = 'not a number'

        testPrompter.onDidShow(() => {
            const disposable = picker.onDidChangeActive(items => {
                // Wait until the invalid custom item appears (validator message
                // in `detail`, flagged as an invalid selection)...
                if (
                    items[0]?.description === input &&
                    items[0]?.detail?.includes('NaN') &&
                    items[0]?.invalidSelection
                ) {
                    // ...then clear the filter box and fall back to selecting
                    // the first regular item.
                    picker.onDidChangeActive(items => {
                        if (items.length > 0) {
                            picker.selectedItems = [picker.items[0]]
                        }
                    })
                    picker.selectedItems = [picker.items[0]]
                    disposable.dispose()
                    picker.value = ''
                    picker.fireOnDidChangeValue('')
                }
            })

            picker.value = input
            picker.fireOnDidChangeValue(input)
        })

        assert.strictEqual(await loadAndPrompt(), testItems[0].data)
    })
})
import * as aws from "@pulumi/aws";
import * as pulumi from "@pulumi/pulumi";
import * as x from "..";
import { Cidr32Block, getIPv4Address } from "./cidr";
import * as utils from "../utils";
/** @internal
 * A single availability zone, identified both by its name (e.g. "us-west-2a")
 * and its stable zone id (e.g. "usw2-az1").
 */
export interface AvailabilityZoneDescription {
    name: string;
    id: string;
}
/** @internal
 * Computes the full subnet/nat-gateway/route topology for a VPC.
 *
 * Dispatches to one of two strategies: when the caller supplied explicit
 * [location] information on the subnets we honor it exactly; otherwise the
 * VPC's CIDR space is partitioned automatically across availability zones.
 *
 * @throws pulumi.ResourceError if only some subnets specify [location], or if
 *         a subnet specifies both [location] and [cidrMask].
 */
export function create(
        resource: pulumi.Resource | undefined, vpcName: string, vpcCidr: string,
        ipv6CidrBlock: pulumi.Output<string> | undefined, availabilityZones: AvailabilityZoneDescription[],
        numberOfNatGateways: number, assignGeneratedIpv6CidrBlock: pulumi.Input<boolean>,
        subnetArgsArray: x.ec2.VpcSubnetArgs[]) {

    // Check if the subnets were given explicit location information or not. If so, we'll
    // respect the location information the user asked for.  If not, we'll automatically
    // smartly partition the vpc.
    //
    // Fix: guard the [0] access — the original dereferenced subnetArgsArray[0]
    // unconditionally and threw a TypeError for an empty subnet list. An empty
    // list now falls through to the computed topology, which handles it.
    const hasExplicitLocation = subnetArgsArray.length > 0 && !!subnetArgsArray[0].location;

    for (const subnet of subnetArgsArray) {
        const siblingHasLocation = !!subnet.location;

        // All subnets must agree on whether they use explicit locations.
        if (hasExplicitLocation !== siblingHasLocation) {
            throw new pulumi.ResourceError("[location] property must be specified for either no subnets or all of the subnets.", resource);
        }

        // [cidrMask] only makes sense when we are computing the layout ourselves.
        if (siblingHasLocation && subnet.cidrMask !== undefined) {
            throw new pulumi.ResourceError("Subnet cannot specify [location] and [cidrMask]", resource);
        }
    }

    const topology = hasExplicitLocation
        ? new ExplicitLocationTopology(resource, vpcName, vpcCidr, ipv6CidrBlock, availabilityZones, numberOfNatGateways, assignGeneratedIpv6CidrBlock)
        : new ComputedLocationTopology(resource, vpcName, vpcCidr, ipv6CidrBlock, availabilityZones, numberOfNatGateways, assignGeneratedIpv6CidrBlock);

    return topology.create(subnetArgsArray);
}
/** @internal
 * Base class for the two topology strategies. Holds the shared VPC-level
 * inputs and the running cursor (`lastAllocatedSubnetCidrBlock`) used when
 * carving subnet CIDR blocks out of the VPC's address space.
 */
abstract class VpcTopology {
    protected readonly vpcCidrBlock: Cidr32Block;
    // The most recently allocated subnet block; the next allocation starts
    // where this one ends. Undefined until the first allocation.
    protected lastAllocatedSubnetCidrBlock?: Cidr32Block;

    constructor(protected readonly resource: pulumi.Resource | undefined,
                protected readonly vpcName: string,
                vpcCidr: string,
                protected readonly ipv6CidrBlock: pulumi.Output<string> | undefined,
                protected readonly availabilityZones: AvailabilityZoneDescription[],
                protected readonly numberOfNatGateways: number,
                protected readonly assignGeneratedIpv6CidrBlock: pulumi.Input<boolean>) {

        this.vpcCidrBlock = Cidr32Block.fromCidrNotation(vpcCidr);
    }

    /** Produces the subnets, nat gateways and nat routes for the VPC. */
    public abstract create(subnetArgsArray: x.ec2.VpcSubnetArgs[]): VpcTopologyDescription;

    protected shouldCreateNatGateways(publicSubnets: SubnetDescription[], privateSubnets: SubnetDescription[]) {
        // To make natgateways:
        // 1. we have to have at least been asked to make some nat gateways.
        // 2. we need public subnets to actually place the nat gateways in.
        // 3. we need private subnets that will actually be connected to the nat gateways.
        return this.numberOfNatGateways > 0 && publicSubnets.length > 0 && privateSubnets.length > 0;
    }
}
/**
 * Topology strategy used when no subnet specifies an explicit [location]:
 * each requested subnet spec is replicated across every availability zone,
 * and CIDR blocks are carved sequentially out of the VPC's address space.
 * Subnets with an explicit [cidrMask] are allocated first; the remaining
 * space is then divided evenly among the unmasked subnets.
 */
class ComputedLocationTopology extends VpcTopology {
    public create(subnetArgsArray: x.ec2.VpcSubnetArgs[]): VpcTopologyDescription {
        const maskedSubnets = subnetArgsArray.filter(s => s.cidrMask !== undefined);
        const unmaskedSubnets = subnetArgsArray.filter(s => s.cidrMask === undefined);

        const subnetDescriptions: SubnetDescription[] = [];
        const natGateways: NatGatewayDescription[] = [];
        const natRoutes: NatRouteDescription[] = [];

        // First, break up the available vpc cidr block to each subnet based on the amount of space
        // they request.
        for (const subnetArgs of maskedSubnets) {
            subnetDescriptions.push(...this.createSubnetsWorker(subnetArgs, subnetArgs.cidrMask!, subnetDescriptions.length));
        }

        // Then, take the remaining subnets can break the remaining space up to them.
        const cidrMaskForUnmaskedSubnets = this.computeCidrMaskForSubnets(unmaskedSubnets, unmaskedSubnets.length > 0);
        for (const subnetArgs of unmaskedSubnets) {
            subnetDescriptions.push(...this.createSubnetsWorker(subnetArgs, cidrMaskForUnmaskedSubnets, subnetDescriptions.length));
        }

        const publicSubnets = subnetDescriptions.filter(d => d.type === "public");
        const privateSubnets = subnetDescriptions.filter(d => d.type === "private");

        // Create nat gateways if we have private subnets and we have public subnets to place them in.
        if (this.shouldCreateNatGateways(publicSubnets, privateSubnets)) {
            const numberOfAvailabilityZones = this.availabilityZones.length;
            if (this.numberOfNatGateways > numberOfAvailabilityZones) {
                throw new Error(`[numberOfNatGateways] cannot be greater than [numberOfAvailabilityZones]: ${this.numberOfNatGateways} > ${numberOfAvailabilityZones}`);
            }

            for (let i = 0; i < this.numberOfNatGateways; i++) {
                // Each public subnet was already created across all availability zones.  So, to
                // maximize coverage of availability zones, we can just walk the public subnets and
                // create a nat gateway for it's availability zone. If more natgateways were
                // requested then we'll just round-robin them among the availability zones.

                // this indexing is safe since we would have created the any subnet across all
                // availability zones.
                const publicSubnetIndex = i % numberOfAvailabilityZones;
                natGateways.push({
                    name: `${this.vpcName}-${i}`,
                    publicSubnet: publicSubnets[publicSubnetIndex].subnetName,
                });
            }

            let roundRobinIndex = 0;

            // We created subnets 'numberOfAvailabilityZones' at a time.  So just jump through them in
            // chunks of that size.
            for (let i = 0, n = privateSubnets.length; i < n; i += numberOfAvailabilityZones) {
                // For each chunk of subnets, we will have spread them across all the availability
                // zones.  We also created a nat gateway per availability zone *up to*
                // numberOfNatGateways.  So for the subnets in an availability zone that we created a
                // nat gateway in, just route to that nat gateway.  For the other subnets that are
                // in an availability zone without a nat gateway, we just round-robin between any
                // nat gateway we created.
                for (let j = 0; j < numberOfAvailabilityZones; j++) {
                    const privateSubnetIndex = i + j;
                    const natGatewayIndex = j < this.numberOfNatGateways
                        ? j
                        : (roundRobinIndex++ % natGateways.length);

                    natRoutes.push({
                        name: `nat-${j}`,
                        privateSubnet: privateSubnets[privateSubnetIndex].subnetName,
                        natGateway: natGateways[natGatewayIndex].name,
                    });
                }
            }
        }

        return { subnets: subnetDescriptions, natGateways, natRoutes };
    }

    /**
     * Determines the cidr mask to use for all subnets that did not request a
     * specific size, dividing the VPC's remaining address space evenly among
     * `subnets.length * availabilityZones.length` blocks.
     *
     * @param checkResult when true, throws if the resulting blocks would be
     *        smaller than AWS's /28 minimum subnet size.
     */
    private computeCidrMaskForSubnets(subnets: x.ec2.VpcSubnetArgs[], checkResult: boolean): number {
        // We need one cidr block for each of these subnets in each availability zone.
        const requiredCidrBlockCount = subnets.length * this.availabilityZones.length;

        const firstAvailableIp = this.getNextCidrBlockStartingAddress();
        const availableIps = this.vpcCidrBlock.endIpAddressExclusive - firstAvailableIp;
        const ipsPerBlock = Math.floor(availableIps / requiredCidrBlockCount);

        // ipsPerBlock is going to be some random integer.  However, we need to get the number of
        // mask bits that corresponds to the closest power of 2 that is smaller.  for example If we
        // can split the remaining space into 300 ips per block, then we need to actually only
        // allocate 256 ips per block.  If we were to allocate 512, we'd run out of space.  So
        // we get the log base 2, and round down so that we get 8 in this case.
        //
        // However, that value corresponds to the trailing mask bits, whereas we want the leading
        // bits.  So take that value and subtract from 32 to get the final amount we need.
        const result = 32 - Math.floor(Math.log2(ipsPerBlock));
        if (checkResult) {
            if (result > 28) {
                // subnets cannot be this small as per: https://aws.amazon.com/vpc/faqs/ The minimum
                // size of a subnet is a /28 (or 14 IP addresses.) for IPv4. Subnets cannot be
                // larger than the VPC in which they are created.
                throw new Error(
`Not enough address space in VPC to create desired subnet config.
VPC has ${availableIps} IPs, but is being asked to split into a total of ${requiredCidrBlockCount} subnets.
${requiredCidrBlockCount} subnets are necessary to have ${this.availabilityZones.length} AZ(s) each with ${subnets.length} subnet(s) in them.
This needs ${ipsPerBlock} IPs/subnet, which is smaller than the minimum (16) allowed by AWS.`);
            }
        }

        return result;
    }

    /** Returns the first IP address available for the next subnet allocation. */
    private getNextCidrBlockStartingAddress() {
        // If we are allocating our first subnet block.  It starts where our vpc cidr block starts.
        // Otherwise, it will start where our last block ends.
        return !this.lastAllocatedSubnetCidrBlock
            ? this.vpcCidrBlock.startIpAddressInclusive
            : this.lastAllocatedSubnetCidrBlock.endIpAddressExclusive;
    }

    /**
     * Allocates the next CIDR block of size `mask`, advancing the allocation
     * cursor. Throws if the block would extend past the end of the VPC's range.
     */
    private assignNextAvailableCidrBlock(mask: number): Cidr32Block {
        // If we are allocating our first subnet block.  It starts where our vpc cidr block starts.
        // Otherwise, it will start where our last block ends.
        const nextStartIpAddressInclusive = this.getNextCidrBlockStartingAddress();

        const nextCidrBlock = new Cidr32Block(nextStartIpAddressInclusive, mask);

        // Make sure this latest block doesn't go past the end of the cidr block of the vpc.
        if (nextCidrBlock.endIpAddressExclusive > this.vpcCidrBlock.endIpAddressExclusive) {
            const lastAllocatedIpAddress = getIPv4Address(nextCidrBlock.endIpAddressExclusive);
            const lastVpcIpAddress = getIPv4Address(this.vpcCidrBlock.endIpAddressExclusive);
            throw new Error(
`Subnet cidr block end ip address extends past that last legal ip address for the vpc.
${lastAllocatedIpAddress} > ${lastVpcIpAddress}`);
        }

        this.lastAllocatedSubnetCidrBlock = nextCidrBlock;
        return nextCidrBlock;
    }

    /**
     * Creates one SubnetDescription per availability zone for the given subnet
     * spec, each with its own freshly-allocated CIDR block.
     *
     * @param currentSubnetIndex running count of subnets already created; used
     *        to derive each subnet's /64 IPv6 block suffix.
     */
    private createSubnetsWorker(subnetArgs: x.ec2.VpcSubnetArgs, cidrMask: number, currentSubnetIndex: number) {
        if (cidrMask < 16 || cidrMask > 28) {
            throw new Error(`Cidr mask must be between "16" and "28" but was ${cidrMask}`);
        }

        const result: SubnetDescription[] = [];
        const type = subnetArgs.type;
        for (let i = 0; i < this.availabilityZones.length; i++) {
            const subnetName = getSubnetName(this.vpcName, subnetArgs, i);
            const assignIpv6AddressOnCreation = utils.ifUndefined(subnetArgs.assignIpv6AddressOnCreation, this.assignGeneratedIpv6CidrBlock);
            const ipv6CidrBlock = this.createIpv6CidrBlock(assignIpv6AddressOnCreation, currentSubnetIndex++);
            result.push({
                type,
                subnetName,
                args: {
                    availabilityZone: this.availabilityZones[i].name,
                    availabilityZoneId: this.availabilityZones[i].id,

                    cidrBlock: this.assignNextAvailableCidrBlock(cidrMask).toString(),
                    ipv6CidrBlock,

                    // Allow the individual subnet to decide if it wants to be mapped.  If not
                    // specified, default to mapping a public-ip open if the type is 'public', and
                    // not mapping otherwise.
                    mapPublicIpOnLaunch: utils.ifUndefined(subnetArgs.mapPublicIpOnLaunch, type === "public"),
                    assignIpv6AddressOnCreation,
                    tags: subnetArgs.tags,
                },
                ignoreChanges: subnetArgs.ignoreChanges,
            });
        }

        return result;

        // Builds names like "{vpc}-{custom-}{type}-{azIndex}".
        function getSubnetName(vpcName: string, subnetArgs: x.ec2.VpcSubnetArgs, i: number) {
            let subnetName = `${subnetArgs.type}-${i}`;
            if (subnetArgs.name) {
                subnetName = `${subnetArgs.name}-` + subnetName;
            }

            return `${vpcName}-${subnetName}`;
        }
    }

    /**
     * Derives the subnet's /64 IPv6 block from the VPC's generated /56 block by
     * replacing the trailing "00" of the /56 prefix with the subnet's index
     * (zero-padded to two digits). Resolves to undefined when IPv6 assignment
     * is disabled; raises a ResourceError if the VPC block is missing or not in
     * the expected "....00::/56" form.
     */
    private createIpv6CidrBlock(
            assignIpv6AddressOnCreation: pulumi.Input<boolean>,
            index: number): pulumi.Output<string> {

        const result = pulumi.all([this.ipv6CidrBlock, assignIpv6AddressOnCreation])
                             .apply(([vpcIpv6CidrBlock, assignIpv6AddressOnCreation]) => {
            if (!assignIpv6AddressOnCreation) {
                return undefined;
            }

            if (!vpcIpv6CidrBlock) {
                throw new pulumi.ResourceError(
                    "Must set [assignGeneratedIpv6CidrBlock] to true on [Vpc] in order to assign ipv6 address to subnet.", this.resource);
            }

            // Should be of the form: 2600:1f16:110:2600::/56
            const colonColonIndex = vpcIpv6CidrBlock.indexOf("::");
            if (colonColonIndex < 0 ||
                vpcIpv6CidrBlock.substr(colonColonIndex) !== "::/56") {

                throw new pulumi.ResourceError(`Vpc ipv6 cidr block was not in an expected form: ${vpcIpv6CidrBlock}`, this.resource);
            }

            const header = vpcIpv6CidrBlock.substr(0, colonColonIndex);
            if (!header.endsWith("00")) {
                throw new pulumi.ResourceError(`Vpc ipv6 cidr block was not in an expected form: ${vpcIpv6CidrBlock}`, this.resource);
            }

            // trim off the 00, and then add 00, 01, 02, 03, etc.
            const prefix = header.substr(0, header.length - 2);
            return prefix + index.toString().padStart(2, "0") + "::/64";
        });

        return <pulumi.Output<string>>result;
    }
}
/**
 * Topology strategy used when every subnet carries explicit [location]
 * information. Subnets are created exactly as specified; nat gateways are
 * then placed in public subnets, preferring the same availability zone as the
 * private subnets they will serve.
 */
class ExplicitLocationTopology extends VpcTopology {
    public create(subnets: x.ec2.VpcSubnetArgs[]): VpcTopologyDescription {
        const subnetDescriptions: SubnetDescription[] = [];
        const natGateways: NatGatewayDescription[] = [];
        const natRoutes: NatRouteDescription[] = [];

        if (subnets.length > 0) {
            // First, we'll create all the actual subnets, keeping track of which AZs they're in. This
            // information will then be used to create the natgateways needed by the private subnets.
            // The private subnets will need a natgateway created in some public subnet (ideally in the
            // same AZ they are in).
            type AZ = string | undefined;
            const azToPublicSubnets = new Map<AZ, SubnetDescription[]>();
            const azToPrivateSubnets = new Map<AZ, SubnetDescription[]>();

            for (let i = 0, n = subnets.length; i < n; i++) {
                const subnetArgs = subnets[i];

                // A bare string location is shorthand for just a cidr block.
                const location = typeof subnetArgs.location === "string"
                    ? { cidrBlock: subnetArgs.location }
                    : subnetArgs.location!;

                const type = subnetArgs.type;
                const subnetName = subnetArgs.name || `${type}-${i}`;
                const subnetDesc: SubnetDescription = {
                    subnetName,
                    type,
                    args: {
                        ...location,

                        // Allow the individual subnet to decide if it wants to be mapped.  If not
                        // specified, default to mapping a public-ip open if the type is 'public', and
                        // not mapping otherwise.
                        mapPublicIpOnLaunch: utils.ifUndefined(subnetArgs.mapPublicIpOnLaunch, type === "public"),
                        assignIpv6AddressOnCreation: utils.ifUndefined(subnetArgs.assignIpv6AddressOnCreation, this.assignGeneratedIpv6CidrBlock),
                        tags: subnetArgs.tags,
                    },
                    ignoreChanges: subnetArgs.ignoreChanges,
                };

                subnetDescriptions.push(subnetDesc);

                // Index public/private subnets by whichever AZ identifier was given
                // (name or id); isolated subnets are not indexed.
                const az = location.availabilityZone || location.availabilityZoneId;
                const specificSubnetMap =
                    type === "public" ? azToPublicSubnets :
                    type === "private" ? azToPrivateSubnets : undefined;

                if (specificSubnetMap) {
                    const specificSubnets = specificSubnetMap.get(az) || [];
                    specificSubnets.push(subnetDesc);
                    specificSubnetMap.set(az, specificSubnets);
                }
            }

            const publicSubnets = subnetDescriptions.filter(d => d.type === "public");
            const privateSubnets = subnetDescriptions.filter(d => d.type === "private");

            if (this.shouldCreateNatGateways(publicSubnets, privateSubnets)) {
                // Create nat gateways for our private subnets.  First, collect the azs the private subnets
                // are in. We'll try to ensure an actual nat gateway in those azs (which is only possible if
                // we have a public subnet in that az). If there is no public subnet in that az, then just
                // pick another public subnet and place the gateway there.
                const azToNatGateway = new Map<string | undefined, NatGatewayDescription>();

                // process AZs in sorted order.  That way we always do things in the same order across
                // runs.
                const privateSubnetAzs = [...azToPrivateSubnets.keys()].sort();
                const minNatGateways = Math.min(privateSubnetAzs.length, this.numberOfNatGateways);
                const publicSubnetsWithNatGateway = new Set<SubnetDescription>();
                for (let i = 0; i < minNatGateways; i++) {
                    const az = privateSubnetAzs[i];

                    // try to make a nat gateway in a public subnet in that az. If we don't have any public
                    // subnets in that az, use a public subnet from another az.  It's not ideal, but it can
                    // at least route things.
                    let publicSubnetForAz: SubnetDescription | undefined;
                    if (azToPublicSubnets.has(az)) {
                        // ok, we've got a public subnet for this az.  Just place hte natgateway in
                        // the first public subnet in that az.
                        publicSubnetForAz = azToPublicSubnets.get(az)![0];
                    }
                    else {
                        // ok, we don't have a public subnet in this az.  Try to pick from any other
                        // public subnet in an az that doesn't currently have a nat gateway.
                        for (const [_, publicSubnetsForOtherAz] of azToPublicSubnets) {
                            if (!publicSubnetsWithNatGateway.has(publicSubnetsForOtherAz[0])) {
                                publicSubnetForAz = publicSubnetsForOtherAz[0];
                                break;
                            }
                        }
                    }

                    if (!publicSubnetForAz) {
                        // no free public subnet
                        continue;
                    }

                    publicSubnetsWithNatGateway.add(publicSubnetForAz);
                    const natGateway = { name: `${this.vpcName}-${i}`, publicSubnet: publicSubnetForAz.subnetName };
                    azToNatGateway.set(az, natGateway);
                    natGateways.push(natGateway);
                }

                // Now, go through every private subnet.  Make a natgateway route for it.  Try to pick
                // a natgateway from it's az.  Otherwise, pick some available natgateway otherwise.
                let natGatewayRoundRobinIndex = 0;
                let routeIndex = 0;
                for (const az of privateSubnetAzs) {
                    const privateSubnetsInAz = azToPrivateSubnets.get(az)!;
                    const natGatewayInAz = azToNatGateway.get(az);

                    for (let i = 0, n = privateSubnetsInAz.length; i < n; i++) {
                        const privateSubnet = privateSubnetsInAz[i];

                        // If we have a nat gateway in this az, then use it.  Otherwise, round robin
                        // through all the nat gateways.
                        const natGateway = natGatewayInAz
                            ? natGatewayInAz
                            : natGateways[natGatewayRoundRobinIndex++ % natGateways.length];

                        natRoutes.push({
                            name: `nat-${routeIndex++}`,
                            privateSubnet: privateSubnet.subnetName,
                            natGateway: natGateway.name,
                        });
                    }
                }
            }
        }

        return { subnets: subnetDescriptions, natGateways, natRoutes };
    }
}
/** @internal
 * The complete computed layout of a VPC: the subnets to create, the nat
 * gateways to place in public subnets, and the routes connecting private
 * subnets to those gateways.
 */
export interface VpcTopologyDescription {
    subnets: SubnetDescription[];
    natGateways: NatGatewayDescription[];
    natRoutes: NatRouteDescription[];
}
/** @internal
 * A single subnet to create, along with the args to create it with.
 */
export interface SubnetDescription {
    type: x.ec2.VpcSubnetType;
    subnetName: string;
    args: x.ec2.SubnetArgs;
    // Property paths on the subnet resource that pulumi should not diff.
    ignoreChanges?: string[];
}
/** @internal */
export interface NatGatewayDescription {
    name: string;
    /** The name of the public subnet that this nat gateway should live in. */
    publicSubnet: string;
}
/** @internal
 * A route sending a private subnet's outbound traffic through a nat gateway.
 */
export interface NatRouteDescription {
    name: string;
    /** The name of the private subnet that is getting the route */
    privateSubnet: string;
    /** The name of the nat gateway this private subnet is getting a route to. */
    natGateway: string;
}
import { PAGInit } from '../src/pag';
import { PAGFile } from '../src/pag-file';
import { PAGView } from '../src/pag-view';
import { AudioPlayer } from './module/audio-player';
import { LayerType, PAG as PAGNamespace, PAGViewListenerEvent, ParagraphJustification } from '../src/types';
import { PAGComposition } from '../src/pag-composition';
import { PAGImageLayer } from '../src/pag-image-layer';
declare global {
  interface Window {
    // vConsole constructor, loaded at runtime from a CDN on mobile devices
    // (see the loadScript call in the onload handler).
    VConsole: any;
  }
}
// Mutable module-level state shared by the demo page's event handlers.
let pagView: PAGView; // Active view rendering into the #pag canvas.
let pagFile: PAGFile; // Currently loaded PAG file.
let cacheEnabled: boolean;
let videoEnabled: boolean;
let globalCacheScale: number;
let videoEl: HTMLVideoElement;
let pagComposition: PAGComposition;
let audioEl: AudioPlayer; // Plays the PAG file's embedded audio track.
let PAG: PAGNamespace; // The wasm module namespace, set once in onload.
let canvasElementSize = 640; // Reduced to 320 on mobile.
let isMobile = false;
window.onload = async () => {
PAG = await PAGInit({ locateFile: (file) => '../lib/' + file });
// Mobile
isMobile = /Mobi|Android|iPhone/i.test(navigator.userAgent);
if (isMobile) {
document
.querySelector('meta[name="viewport"]')
?.setAttribute(
'content',
'viewport-fit=cover, width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, user-scalable=no',
);
await loadScript('https://unpkg.com/vconsole@latest/dist/vconsole.min.js');
const vconsole = new window.VConsole();
canvasElementSize = 320;
const canvas = document.getElementById('pag') as HTMLCanvasElement;
canvas.width = canvasElementSize;
canvas.height = canvasElementSize;
const tablecloth = document.getElementById('tablecloth');
tablecloth!.style.width = `${canvasElementSize}px`;
tablecloth!.style.height = `${canvasElementSize}px`;
}
console.log('wasm loaded!', PAG);
document.getElementById('waiting')!.style.display = 'none';
document.getElementById('container')!.style.display = isMobile ? 'block' : '';
// 加载测试字体
document.getElementById('btn-test-font')?.addEventListener('click', () => {
const url = './assets/SourceHanSerifCN-Regular.ttf';
fetch(url)
.then((response) => response.blob())
.then(async (blob) => {
const file = new window.File([blob], url.replace(/(.*\/)*([^.]+)/i, '$2'));
await PAG.PAGFont.registerFont('SourceHanSerifCN', file);
console.log(`已加载${file.name}`);
});
});
// 加载PAG
document.getElementById('btn-upload-pag')?.addEventListener('click', () => {
document.getElementById('upload-pag')?.click();
});
document.getElementById('upload-pag')?.addEventListener('change', (event: any) => {
if (event.target) {
createPAGView(event.target.files[0] as File);
}
});
document.getElementById('btn-test-vector-pag')?.addEventListener('click', () => {
const url = './assets/like.pag';
fetch(url)
.then((response) => response.blob())
.then((blob) => {
const file = new window.File([blob], url.replace(/(.*\/)*([^.]+)/i, '$2'));
createPAGView(file);
});
});
document.getElementById('btn-test-video-pag')?.addEventListener('click', () => {
const url = './assets/particle_video.pag';
fetch(url)
.then((response) => response.blob())
.then((blob) => {
const file = new window.File([blob], url.replace(/(.*\/)*([^.]+)/i, '$2'));
createPAGView(file);
});
});
document.getElementById('btn-test-text-pag')?.addEventListener('click', async () => {
const url = './assets/test2.pag';
const response = await fetch(url);
const blob = await response.blob();
const file = new window.File([blob], url.replace(/(.*\/)*([^.]+)/i, '$2'));
await createPAGView(file);
const textDoc = pagFile.getTextData(0);
console.log(textDoc);
textDoc.text = '替换后的文字🤔';
textDoc.fillColor = { red: 255, green: 255, blue: 255 };
textDoc.applyFill = true;
textDoc.backgroundAlpha = 100;
textDoc.backgroundColor = { red: 255, green: 0, blue: 0 };
textDoc.baselineShift = 200;
textDoc.fauxBold = true;
textDoc.fauxItalic = false;
textDoc.fontFamily = 'SourceHanSerifCN';
textDoc.fontSize = 100;
textDoc.justification = ParagraphJustification.CenterJustify;
textDoc.strokeWidth = 20;
textDoc.strokeColor = { red: 0, green: 0, blue: 0 };
textDoc.applyStroke = true;
textDoc.strokeOverFill = true;
textDoc.tracking = 600;
pagFile.replaceText(0, textDoc);
console.log(pagFile.getTextData(0));
await pagView.flush();
});
// Get PAGFile duration
document.getElementById('btn-pagfile-get-duration')?.addEventListener('click', () => {
const duration = pagFile.duration();
console.log(`PAGFile duration ${duration}`);
});
// PAGFile setDuration
document.getElementById('btn-pagfile-set-duration')?.addEventListener('click', () => {
const duration = Number((document.getElementById('input-pagfile-duration') as HTMLInputElement).value);
pagFile.setDuration(duration);
console.log(`Set PAGFile duration ${duration} `);
});
// Get timeStretchMode
document.getElementById('btn-pagfile-time-stretch-mode')?.addEventListener('click', () => {
const timeStretchMode = pagFile.timeStretchMode();
console.log(`PAGFile timeStretchMode ${timeStretchMode} `);
});
document.getElementById('btn-pagfile-set-time-stretch-mode')?.addEventListener('click', () => {
const mode = Number((document.getElementById('select-time-stretch-mode') as HTMLSelectElement).value);
pagFile.setTimeStretchMode(mode);
console.log(`Set PAGFile timeStretchMode ${mode}`);
});
// 控制
document.getElementById('btn-play')?.addEventListener('click', () => {
pagView.play();
audioEl.play();
console.log('开始');
});
document.getElementById('btn-pause')?.addEventListener('click', () => {
pagView.pause();
audioEl.pause();
console.log('暂停');
});
document.getElementById('btn-stop')?.addEventListener('click', () => {
pagView.stop();
audioEl.stop();
console.log('停止');
});
document.getElementById('btn-destroy')?.addEventListener('click', () => {
pagView.destroy();
audioEl.destroy();
console.log('销毁');
});
// 获取进度
document.getElementById('btn-getProgress')?.addEventListener('click', () => {
console.log(`当前进度:${pagView.getProgress()}`);
});
// 设置进度
document.getElementById('setProgress')?.addEventListener('click', () => {
let progress = Number((document.getElementById('progress') as HTMLInputElement).value);
if (!(progress >= 0 && progress <= 1)) {
alert('请输入0~1之间');
}
pagView.setProgress(progress);
console.log(`已设置进度:${progress}`);
});
// 设置循环次数
document.getElementById('setRepeatCount')?.addEventListener('click', () => {
let repeatCount = Number((document.getElementById('repeatCount') as HTMLInputElement).value);
pagView.setRepeatCount(repeatCount);
console.log(`已设置循环次数:${repeatCount}`);
});
// maxFrameRate
document.getElementById('btn-maxFrameRate')?.addEventListener('click', () => {
console.log(`maxFrameRate: ${pagView.maxFrameRate()}`);
});
document.getElementById('setMaxFrameRate')?.addEventListener('click', () => {
let maxFrameRate = Number((document.getElementById('maxFrameRate') as HTMLInputElement).value);
pagView.setMaxFrameRate(maxFrameRate);
});
// scaleMode
document.getElementById('btn-scaleMode')?.addEventListener('click', () => {
console.log(`scaleMode: ${pagView.scaleMode()}`);
});
document.getElementById('setScaleMode')?.addEventListener('click', () => {
let scaleMode = Number((document.getElementById('scaleMode') as HTMLSelectElement).value);
pagView.setScaleMode(scaleMode);
});
// videoEnabled
videoEnabled = true;
document.getElementById('btn-videoEnabled')?.addEventListener('click', () => {
videoEnabled = pagView.videoEnabled();
console.log(`videoEnabled status: ${videoEnabled}`);
});
document.getElementById('btn-setVideoEnabled')?.addEventListener('click', () => {
pagView.setVideoEnabled(!videoEnabled);
});
// cacheEnabled
cacheEnabled = true;
document.getElementById('btn-cacheEnabled')?.addEventListener('click', () => {
cacheEnabled = pagView.cacheEnabled();
console.log(`cacheEnabled status: ${cacheEnabled}`);
});
document.getElementById('btn-setCacheEnabled')?.addEventListener('click', () => {
pagView.setCacheEnabled(!cacheEnabled);
});
// PAGComposition
document.getElementById('btn-composition')?.addEventListener('click', () => {
testPAGCompositionAPi();
});
// freeCache
document.getElementById('btn-freeCache')?.addEventListener('click', () => {
pagView.freeCache();
});
// cacheScale
globalCacheScale = 1;
document.getElementById('btn-cacheScale')?.addEventListener('click', () => {
globalCacheScale = pagView.cacheScale();
console.log(`cacheScale status: ${globalCacheScale}`);
});
document.getElementById('btn-setCacheScale')?.addEventListener('click', () => {
let cacheScale = Number((document.getElementById('cacheScale') as HTMLInputElement).value);
if (!(cacheScale >= 0 && cacheScale <= 1)) {
alert('请输入0~1之间');
}
pagView.setCacheScale(cacheScale);
});
};
// Returns true when a layer reference is present; otherwise logs a notice and returns false.
const existsLayer = (pagLayer: object) => {
  const present = Boolean(pagLayer);
  if (!present) {
    console.log('no Layer');
  }
  return present;
};
// PAGComposition api test
// Table of PAGComposition API smoke tests, keyed by API name.
// Each entry exercises one method of the module-level `pagComposition` and logs the result.
const testPAGComposition: { [key: string]: () => void } = {
// Logs the composition's current width/height.
rect: () => {
console.log(`test result: width: ${pagComposition.width()}, height: ${pagComposition.height()}`);
},
// Resizes the composition content to 360x640 and logs the resulting size.
setContentSize: () => {
pagComposition.setContentSize(360, 640);
console.log(`test setContentSize result: width: ${pagComposition.width()}, height: ${pagComposition.height()}`);
},
// Logs the number of direct child layers.
numChildren: () => {
console.log(`test numChildren: ${pagComposition.numChildren()}`);
},
// Fetches layer 0 and logs its name (bails out when absent).
getLayerAt: () => {
const pagLayer = pagComposition.getLayerAt(0);
if (!existsLayer(pagLayer)) return;
console.log(`test getLayerAt index 0, layerName: ${pagLayer.layerName()}`);
},
// Looks up all layers sharing layer 0's name and logs each match.
getLayersByName: () => {
const pagLayer = pagComposition.getLayerAt(0);
if (!existsLayer(pagLayer)) return;
const layerName = pagLayer.layerName();
const vectorPagLayer = pagComposition.getLayersByName(layerName);
for (let j = 0; j < vectorPagLayer.size(); j++) {
const pagLayerWasm = vectorPagLayer.get(j);
// Wrap the raw wasm handle in a PAGLayer so its methods can be called.
const pagLayer_1 = new PAG.PAGLayer(pagLayerWasm);
console.log(`test getLayersByName: layerName: ${pagLayer_1.layerName()}`);
}
},
// Logs the composition's audio start time.
audioStartTime: () => {
const audioStartTime = pagComposition.audioStartTime();
console.log('test audioStartTime:', audioStartTime);
},
// Logs how many audio markers the composition carries.
audioMarkers: () => {
const audioMarkers = pagComposition.audioMarkers();
console.log(`test audioMarkers: size`, audioMarkers.size());
},
// Logs the embedded audio byte payload.
audioBytes: () => {
const audioBytes = pagComposition.audioBytes();
console.log('test audioBytes:', audioBytes);
},
// Logs the index reported for layer 0.
getLayerIndex: () => {
const pagLayer = pagComposition.getLayerAt(0);
const index = pagComposition.getLayerIndex(pagLayer);
console.log(`test GetLayerIndex: ${index}`);
},
// Swaps layers 0 and 1 by index (delegates to the swapLayer helper).
swapLayerAt: () => {
swapLayer('swapLayerAt');
},
// Swaps layers 0 and 1 by layer reference (delegates to the swapLayer helper).
swapLayer: () => {
swapLayer('swapLayer');
},
// Logs whether the composition reports containing its own layer 0.
contains: () => {
const pagLayer = pagComposition.getLayerAt(0);
const isContains = pagComposition.contains(pagLayer);
if (isContains) {
console.log('test contains');
}
},
// Removes layer 0 then re-adds it, logging before/after child counts on success.
addLayer: () => {
const pagLayer = pagComposition.getLayerAt(0);
pagComposition.removeLayerAt(0);
const oldNum = pagComposition.numChildren();
const isSuccess: boolean = pagComposition.addLayer(pagLayer);
if (isSuccess) {
console.log(`test addLayer success: old num ${oldNum} current num ${pagComposition.numChildren()}`);
}
},
// Removes layer 0 and logs before/after child counts.
removeLayerAt: () => {
const oldNum = pagComposition.numChildren();
pagComposition.removeLayerAt(0);
console.log(
`test delete Layer[0] success: old LayersNum: ${oldNum} current LayersNum ${pagComposition.numChildren()}`,
);
},
// Removes every layer and logs before/after child counts.
removeAllLayers: () => {
const oldNum = pagComposition.numChildren();
pagComposition.removeAllLayers();
console.log(
`test removeAllLayers success: old LayersNum${oldNum} current LayersNum ${pagComposition.numChildren()}`,
);
},
};
// Runs every PAGComposition smoke test registered in `testPAGComposition`,
// bracketed by START/END log markers.
const testPAGCompositionAPi = () => {
console.log(`-------------------TEST PAGCompositionAPI START--------------------- `);
// Object.keys only yields own enumerable keys, so the previous per-key
// `hasOwnProperty` re-check (called directly on the object) was redundant.
Object.keys(testPAGComposition).forEach((key) => {
testPAGComposition[key]();
});
console.log(`-------------------TEST PAGCompositionAPI END--------------------- `);
};
// Swaps the first two layers of the current composition — by reference when
// `type` is 'swapLayer', otherwise by index via swapLayerAt — and logs the
// layer names observed before and after the exchange.
const swapLayer = (type: string) => {
const first = pagComposition.getLayerAt(0);
const second = pagComposition.getLayerAt(1);
if (!first || !second) {
console.log('No layer switching');
return;
}
const nameBefore0 = first.layerName();
const nameBefore1 = second.layerName();
if (type !== 'swapLayer') {
pagComposition.swapLayerAt(0, 1);
} else {
pagComposition.swapLayer(first, second);
}
const nameAfter0 = pagComposition.getLayerAt(0).layerName();
const nameAfter1 = pagComposition.getLayerAt(1).layerName();
console.log(
`test ${type}: oldLayerName_0=${nameBefore0}, oldLayerName_1=${nameBefore1} exchange LayerName_0=${nameAfter0}, LayerName_1=${nameAfter1} `,
);
};
// Loads `file` as a PAGFile, (re)initializes the PAGView on the #pag canvas,
// registers animation listeners, rebuilds the editable-layer panel and creates
// an AudioPlayer from the composition's embedded audio bytes.
// Side effects: reassigns the module-level pagFile, pagView, pagComposition and audioEl.
// Returns the newly created PAGView.
const createPAGView = async (file: File) => {
// Tear down previous instances before loading a new file.
if (pagFile) pagFile.destroy();
if (pagView) pagView.destroy();
// Measure and display the file decode time.
const decodeTime = performance.now();
pagFile = (await PAG.PAGFile.load(file)) as PAGFile;
document.getElementById('decode-time')!.innerText = `PAG File decode time: ${Math.floor(
performance.now() - decodeTime,
)}ms`;
const pagCanvas = document.getElementById('pag') as HTMLCanvasElement;
// pagCanvas.width = canvasElementSize;
// pagCanvas.height = canvasElementSize;
// Measure and display the view initialization time.
const initializedTime = performance.now();
pagView = (await PAG.PAGView.init(pagFile, pagCanvas)) as PAGView;
document.getElementById('initialized-time')!.innerText = `PAG View initialized time: ${Math.floor(
performance.now() - initializedTime,
)}ms`;
pagView.setRepeatCount(0);
// Bind event listeners.
pagView.addListener(PAGViewListenerEvent.onAnimationStart, (event) => {
console.log('onAnimationStart', event);
});
pagView.addListener(PAGViewListenerEvent.onAnimationEnd, (event) => {
console.log('onAnimationEnd', event);
});
pagView.addListener(PAGViewListenerEvent.onAnimationCancel, (event) => {
console.log('onAnimationCancel', event);
});
pagView.addListener(PAGViewListenerEvent.onAnimationRepeat, (event) => {
console.log('onAnimationRepeat', event);
// Restart audio so it stays in sync with each animation loop.
audioEl.stop();
audioEl.play();
});
let lastProgress = 0;
let lastFlushedTime = 0;
let flushCount = 0; // Update the FPS info every 3 distinct progress changes.
pagView.addListener(PAGViewListenerEvent.onAnimationPlay, (event) => {
console.log('onAnimationPlay', event);
lastFlushedTime = performance.now();
});
pagView.addListener(PAGViewListenerEvent.onAnimationPause, (event) => {
console.log('onAnimationPause', event);
});
pagView.addListener(PAGViewListenerEvent.onAnimationFlushed, (pagView: PAGView) => {
// console.log('onAnimationFlushed', pagView);
// Recompute the FPS readout from the time elapsed across 3 distinct progress updates.
const progress = pagView.getProgress();
const time = performance.now();
if (progress !== lastProgress) {
flushCount += 1;
lastProgress = progress;
}
if (flushCount === 3) {
document.getElementById('fps')!.innerText = `PAG View FPS: ${Math.floor(1000 / ((time - lastFlushedTime) / 3))}`;
lastFlushedTime = time;
flushCount = 0;
}
});
document.getElementById('control')!.style.display = '';
// Layer editing panel.
const editableLayers = getEditableLayer(pagFile);
renderEditableLayer(editableLayers);
console.log(`已加载 ${file.name}`);
pagComposition = pagView.getComposition();
audioEl = new AudioPlayer(pagComposition.audioBytes());
return pagView;
};
// Assigns `src` to the video element and resolves with true once the element
// fires 'canplay'. The one-shot listener removes itself before resolving.
const loadVideoReady = (el: HTMLVideoElement, src: string) =>
  new Promise((resolve) => {
    const onCanPlay = () => {
      el.removeEventListener('canplay', onCanPlay);
      console.log('canplay');
      resolve(true);
    };
    el.addEventListener('canplay', onCanPlay);
    el.src = src;
  });
// Seeks the video element to `time` (seconds) and resolves with true once the
// element fires 'timeupdate'. The one-shot listener removes itself before resolving.
const setVideoTime = (el: HTMLVideoElement, time: number) =>
  new Promise((resolve) => {
    const onTimeUpdate = () => {
      el.removeEventListener('timeupdate', onTimeUpdate);
      console.log('timeupdate');
      resolve(true);
    };
    el.addEventListener('timeupdate', onTimeUpdate);
    el.currentTime = time;
  });
// Collects metadata for every editable image layer in the PAGFile.
// Returns one plain object per layer (identity, visibility and timing fields,
// with startTime converted from layer-local to global time).
// Note: the previous unused `editableImageCount` local (a dead numImages() call)
// has been removed.
const getEditableLayer = (pagFile: PAGFile) => {
const indices = pagFile.getEditableIndices(LayerType.Image);
const res: any[] = [];
for (let i = 0; i < indices.size(); i++) {
const imageLayers = pagFile.getLayersByEditableIndex(indices.get(i), LayerType.Image);
for (let j = 0; j < imageLayers.size(); j++) {
const layer = imageLayers.get(j) as PAGImageLayer;
const uniqueID = layer.uniqueID();
const layerType = layer.layerType();
const layerName = layer.layerName();
const alpha = layer.alpha();
const visible = layer.visible();
const editableIndex = layer.editableIndex();
const duration = layer.duration();
const frameRate = layer.frameRate();
const localStartTime = layer.startTime();
// Convert the layer-local start time into the global timeline.
const startTime = layer.localTimeToGlobal(localStartTime);
res.push({ uniqueID, layerType, layerName, alpha, visible, editableIndex, frameRate, startTime, duration });
}
}
return res;
};
// Renders the editable-layer panel: one row per editable image layer, each with
// "replace image" / "replace video" buttons wired to that layer's editableIndex.
const renderEditableLayer = (editableLayers: any[]) => {
const editLayerContent = document.getElementById('editLayer-content');
// BUG FIX: childNodes is a *live* NodeList — removing nodes while forEach
// iterates it skips every other node, so the panel was never fully cleared.
// Drain from the front instead until the container is empty.
while (editLayerContent?.firstChild) {
editLayerContent.removeChild(editLayerContent.firstChild);
}
const box = document.createElement('div');
box.className = 'mt-24';
box.innerText = 'Editable layer:';
editableLayers.forEach((layer) => {
const item = document.createElement('div');
item.className = 'mt-24';
item.innerText = `editableIndex: ${layer.editableIndex} startTime: ${layer.startTime} duration: ${layer.duration}`;
const replaceImageBtn = document.createElement('button');
replaceImageBtn.addEventListener('click', () => {
replaceImage(item, layer.editableIndex);
});
replaceImageBtn.style.marginLeft = '12px';
replaceImageBtn.innerText = '替换图片';
item.appendChild(replaceImageBtn);
const replaceVideoBtn = document.createElement('button');
replaceVideoBtn.addEventListener('click', () => {
replaceVideo(item, layer.editableIndex);
});
replaceVideoBtn.style.marginLeft = '12px';
replaceVideoBtn.innerText = '替换视频';
item.appendChild(replaceVideoBtn);
box.appendChild(item);
});
editLayerContent?.appendChild(box);
};
// 替换图片
// Opens a hidden file picker and replaces the image at `index` in the current
// composition with the chosen file, then flushes the view.
const replaceImage = (element: HTMLDivElement, index: number) => {
const inputEl = document.createElement('input');
inputEl.type = 'file';
inputEl.style.display = 'none';
element.appendChild(inputEl);
inputEl.addEventListener('change', async (event: any) => {
const pagImage = await PAG.PAGImage.fromFile(event.target.files[0]);
// Renamed from `pagFile`: this is the view's composition, and the old name
// shadowed (and misrepresented) the module-level `pagFile` binding.
const composition = pagView.getComposition();
composition.replaceImage(index, pagImage);
await pagView.flush();
pagImage.destroy();
});
inputEl.click();
element.removeChild(inputEl);
};
// 替换视频
// Opens a hidden file picker, decodes the first frame of the chosen video
// (seeks to 0.05s) and uses it to replace the image at `index` in the current
// composition, then flushes the view.
const replaceVideo = (element: HTMLDivElement, index: number) => {
const inputEl = document.createElement('input');
inputEl.type = 'file';
inputEl.style.display = 'none';
element.appendChild(inputEl);
inputEl.addEventListener('change', async (event: any) => {
if (!videoEl) videoEl = document.createElement('video');
const objectUrl = URL.createObjectURL(event.target.files[0]);
try {
await loadVideoReady(videoEl, objectUrl);
await setVideoTime(videoEl, 0.05);
const pagImage = PAG.PAGImage.fromSource(videoEl);
// Renamed from `pagFile`: this is the view's composition, and the old name
// shadowed the module-level `pagFile` binding.
const composition = pagView.getComposition();
composition.replaceImage(index, pagImage);
await pagView.flush();
pagImage.destroy();
} finally {
// BUG FIX: the blob URL was previously never revoked, leaking one object
// URL per replacement. Release it once the frame has been captured.
URL.revokeObjectURL(objectUrl);
}
});
inputEl.click();
element.removeChild(inputEl);
};
const loadScript = (url: string) => {
return new Promise((resolve, reject) => {
const scriptEl = document.createElement('script');
scriptEl.type = 'text/javascript';
scriptEl.onload = () => {
resolve(true);
};
scriptEl.onerror = (e) => {
reject(e);
};
scriptEl.src = url;
document.body.appendChild(scriptEl);
});
}; | the_stack |
declare namespace enetPinusPb {
/**
* The Endian class contains values that denote the byte order used to represent multibyte numbers.
* The byte order is either bigEndian (most significant byte first) or littleEndian (least significant byte first).
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* Endian 类中包含一些值,它们表示用于表示多字节数字的字节顺序。
* 字节顺序为 bigEndian(最高有效字节位于最前)或 littleEndian(最低有效字节位于最前)。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
class Endian {
// NOTE(review): ambient declaration — the actual string values (presumably
// "littleEndian" / "bigEndian") are assigned in the implementation file; confirm there.
/**
* Indicates the least significant byte of the multibyte number appears first in the sequence of bytes.
* The hexadecimal number 0x12345678 has 4 bytes (2 hexadecimal digits per byte). The most significant byte is 0x12. The least significant byte is 0x78. (For the equivalent decimal number, 305419896, the most significant digit is 3, and the least significant digit is 6).
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 表示多字节数字的最低有效字节位于字节序列的最前面。
* 十六进制数字 0x12345678 包含 4 个字节(每个字节包含 2 个十六进制数字)。最高有效字节为 0x12。最低有效字节为 0x78。(对于等效的十进制数字 305419896,最高有效数字是 3,最低有效数字是 6)。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
static LITTLE_ENDIAN: string;
/**
* Indicates the most significant byte of the multibyte number appears first in the sequence of bytes.
* The hexadecimal number 0x12345678 has 4 bytes (2 hexadecimal digits per byte). The most significant byte is 0x12. The least significant byte is 0x78. (For the equivalent decimal number, 305419896, the most significant digit is 3, and the least significant digit is 6).
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 表示多字节数字的最高有效字节位于字节序列的最前面。
* 十六进制数字 0x12345678 包含 4 个字节(每个字节包含 2 个十六进制数字)。最高有效字节为 0x12。最低有效字节为 0x78。(对于等效的十进制数字 305419896,最高有效数字是 3,最低有效数字是 6)。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
static BIG_ENDIAN: string;
}
/** Internal numeric byte-order tags used by ByteArray's `$endian` field. */
const enum EndianConst {
LITTLE_ENDIAN = 0,
BIG_ENDIAN = 1
}
/**
* The ByteArray class provides methods and attributes for optimized reading and writing as well as dealing with binary data.
* Note: The ByteArray class is applied to the advanced developers who need to access data at the byte layer.
* @version Egret 2.4
* @platform Web,Native
* @includeExample egret/utils/ByteArray.ts
* @language en_US
*/
/**
* ByteArray 类提供用于优化读取、写入以及处理二进制数据的方法和属性。
* 注意:ByteArray 类适用于需要在字节层访问数据的高级开发人员。
* @version Egret 2.4
* @platform Web,Native
* @includeExample egret/utils/ByteArray.ts
* @language zh_CN
*/
class ByteArray {
// NOTE(review): ambient declaration only — all method bodies live in the implementation file.
/**
* @private
*/
// Presumably the extra padding (in bytes) applied when the buffer grows — confirm in implementation.
protected bufferExtSize: number;
// Backing DataView and byte view — presumably over the same underlying ArrayBuffer
// (see the buffer/bytes/dataView accessors below).
protected data: DataView;
protected _bytes: Uint8Array;
/**
* @private
*/
protected _position: number;
/**
*
* 已经使用的字节偏移量
* @protected
* @type {number}
* @memberOf ByteArray
*/
protected write_position: number;
/**
* Changes or reads the byte order; egret.EndianConst.BIG_ENDIAN or egret.EndianConst.LITTLE_EndianConst.
* @default egret.EndianConst.BIG_ENDIAN
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 更改或读取数据的字节顺序;egret.EndianConst.BIG_ENDIAN 或 egret.EndianConst.LITTLE_ENDIAN。
* @default egret.EndianConst.BIG_ENDIAN
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
get endian(): string;
set endian(value: string);
// Numeric mirror of `endian`, stored as an EndianConst tag.
protected $endian: EndianConst;
/**
* @version Egret 2.4
* @platform Web,Native
*/
constructor(buffer?: ArrayBuffer | Uint8Array, bufferExtSize?: number);
/**
* @deprecated
* @version Egret 2.4
* @platform Web,Native
*/
setArrayBuffer(buffer: ArrayBuffer): void;
/**
* 可读的剩余字节数
*
* @returns
*
* @memberOf ByteArray
*/
get readAvailable(): number;
get buffer(): ArrayBuffer;
get rawBuffer(): ArrayBuffer;
/**
* @private
*/
set buffer(value: ArrayBuffer);
get bytes(): Uint8Array;
/**
* @private
* @version Egret 2.4
* @platform Web,Native
*/
get dataView(): DataView;
/**
* @private
*/
set dataView(value: DataView);
/**
* @private
*/
get bufferOffset(): number;
/**
* The current position of the file pointer (in bytes) to move or return to the ByteArray object. The next time you start reading reading method call in this position, or will start writing in this position next time call a write method.
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 将文件指针的当前位置(以字节为单位)移动或返回到 ByteArray 对象中。下一次调用读取方法时将在此位置开始读取,或者下一次调用写入方法时将在此位置开始写入。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
get position(): number;
set position(value: number);
/**
* The length of the ByteArray object (in bytes).
* If the length is set to be larger than the current length, the right-side zero padding byte array.
* If the length is set smaller than the current length, the byte array is truncated.
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* ByteArray 对象的长度(以字节为单位)。
* 如果将长度设置为大于当前长度的值,则用零填充字节数组的右侧。
* 如果将长度设置为小于当前长度的值,将会截断该字节数组。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
get length(): number;
set length(value: number);
protected _validateBuffer(value: number): void;
/**
* The number of bytes that can be read from the current position of the byte array to the end of the array data.
* When you access a ByteArray object, the bytesAvailable property in conjunction with the read methods each use to make sure you are reading valid data.
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 可从字节数组的当前位置到数组末尾读取的数据的字节数。
* 每次访问 ByteArray 对象时,将 bytesAvailable 属性与读取方法结合使用,以确保读取有效的数据。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
get bytesAvailable(): number;
/**
* Clears the contents of the byte array and resets the length and position properties to 0.
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 清除字节数组的内容,并将 length 和 position 属性重置为 0。
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
clear(): void;
/**
* Read a Boolean value from the byte stream. Read a simple byte. If the byte is non-zero, it returns true; otherwise, it returns false.
* @return If the byte is non-zero, it returns true; otherwise, it returns false.
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取布尔值。读取单个字节,如果字节非零,则返回 true,否则返回 false
* @return 如果字节不为零,则返回 true,否则返回 false
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readBoolean(): boolean;
/**
* Read signed bytes from the byte stream.
* @return An integer ranging from -128 to 127
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取带符号的字节
* @return 介于 -128 和 127 之间的整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readByte(): number;
/**
* Read data byte number specified by the length parameter from the byte stream. Starting from the position specified by offset, read bytes into the ByteArray object specified by the bytes parameter, and write bytes into the target ByteArray
* @param bytes ByteArray object that data is read into
* @param offset Offset (position) in bytes. Read data should be written from this position
* @param length Byte number to be read Default value 0 indicates reading all available data
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取 length 参数指定的数据字节数。从 offset 指定的位置开始,将字节读入 bytes 参数指定的 ByteArray 对象中,并将字节写入目标 ByteArray 中
* @param bytes 要将数据读入的 ByteArray 对象
* @param offset bytes 中的偏移(位置),应从该位置写入读取的数据
* @param length 要读取的字节数。默认值 0 导致读取所有可用的数据
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readBytes(bytes: ByteArray, offset?: number, length?: number): void;
/**
* Read an IEEE 754 double-precision (64 bit) floating point number from the byte stream
* @return Double-precision (64 bit) floating point number
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个 IEEE 754 双精度(64 位)浮点数
* @return 双精度(64 位)浮点数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readDouble(): number;
/**
* Read an IEEE 754 single-precision (32 bit) floating point number from the byte stream
* @return Single-precision (32 bit) floating point number
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个 IEEE 754 单精度(32 位)浮点数
* @return 单精度(32 位)浮点数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readFloat(): number;
/**
* Read a 32-bit signed integer from the byte stream.
* @return A 32-bit signed integer ranging from -2147483648 to 2147483647
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个带符号的 32 位整数
* @return 介于 -2147483648 和 2147483647 之间的 32 位带符号整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readInt(): number;
/**
* Read a 16-bit signed integer from the byte stream.
* @return A 16-bit signed integer ranging from -32768 to 32767
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个带符号的 16 位整数
* @return 介于 -32768 和 32767 之间的 16 位带符号整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readShort(): number;
/**
* Read unsigned bytes from the byte stream.
* @return A 32-bit unsigned integer ranging from 0 to 255
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取无符号的字节
* @return 介于 0 和 255 之间的 32 位无符号整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readUnsignedByte(): number;
/**
* Read a 32-bit unsigned integer from the byte stream.
* @return A 32-bit unsigned integer ranging from 0 to 4294967295
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个无符号的 32 位整数
* @return 介于 0 和 4294967295 之间的 32 位无符号整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readUnsignedInt(): number;
/**
* Read a 16-bit unsigned integer from the byte stream.
* @return A 16-bit unsigned integer ranging from 0 to 65535
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个无符号的 16 位整数
* @return 介于 0 和 65535 之间的 16 位无符号整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readUnsignedShort(): number;
/**
* Read a UTF-8 character string from the byte stream Assume that the prefix of the character string is a short unsigned integer (use byte to express length)
* @return UTF-8 character string
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个 UTF-8 字符串。假定字符串的前缀是无符号的短整型(以字节表示长度)
* @return UTF-8 编码的字符串
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readUTF(): string;
/**
* Read a UTF-8 byte sequence specified by the length parameter from the byte stream, and then return a character string
* @param Specify a short unsigned integer of the UTF-8 byte length
* @return A character string consists of UTF-8 bytes of the specified length
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 从字节流中读取一个由 length 参数指定的 UTF-8 字节序列,并返回一个字符串
* @param length 指明 UTF-8 字节长度的无符号短整型数
* @return 由指定长度的 UTF-8 字节组成的字符串
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
readUTFBytes(length: number): string;
/**
* Write a Boolean value. A single byte is written according to the value parameter. If the value is true, write 1; if the value is false, write 0.
* @param value A Boolean value determining which byte is written. If the value is true, write 1; if the value is false, write 0.
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 写入布尔值。根据 value 参数写入单个字节。如果为 true,则写入 1,如果为 false,则写入 0
* @param value 确定写入哪个字节的布尔值。如果该参数为 true,则该方法写入 1;如果该参数为 false,则该方法写入 0
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeBoolean(value: boolean): void;
/**
* Write a byte into the byte stream
* The low 8 bits of the parameter are used. The high 24 bits are ignored.
* @param value A 32-bit integer. The low 8 bits will be written into the byte stream
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个字节
* 使用参数的低 8 位。忽略高 24 位
* @param value 一个 32 位整数。低 8 位将被写入字节流
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeByte(value: number): void;
/**
* Write the byte sequence that includes length bytes in the specified byte array, bytes, (starting at the byte specified by offset, using a zero-based index), into the byte stream
* If the length parameter is omitted, the default length value 0 is used and the entire buffer starting at offset is written. If the offset parameter is also omitted, the entire buffer is written
* If the offset or length parameter is out of range, they are clamped to the beginning and end of the bytes array.
* @param bytes ByteArray Object
* @param offset A zero-based index specifying the position into the array to begin writing
* @param length An unsigned integer specifying how far into the buffer to write
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 将指定字节数组 bytes(起始偏移量为 offset,从零开始的索引)中包含 length 个字节的字节序列写入字节流
* 如果省略 length 参数,则使用默认长度 0;该方法将从 offset 开始写入整个缓冲区。如果还省略了 offset 参数,则写入整个缓冲区
* 如果 offset 或 length 超出范围,它们将被锁定到 bytes 数组的开头和结尾
* @param bytes ByteArray 对象
* @param offset 从 0 开始的索引,表示在数组中开始写入的位置
* @param length 一个无符号整数,表示在缓冲区中的写入范围
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeBytes(bytes: ByteArray, offset?: number, length?: number): void;
/**
* Write an IEEE 754 double-precision (64 bit) floating point number into the byte stream
* @param value Double-precision (64 bit) floating point number
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个 IEEE 754 双精度(64 位)浮点数
* @param value 双精度(64 位)浮点数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeDouble(value: number): void;
/**
* Write an IEEE 754 single-precision (32 bit) floating point number into the byte stream
* @param value Single-precision (32 bit) floating point number
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个 IEEE 754 单精度(32 位)浮点数
* @param value 单精度(32 位)浮点数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeFloat(value: number): void;
/**
* Write a 32-bit signed integer into the byte stream
* @param value An integer to be written into the byte stream
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个带符号的 32 位整数
* @param value 要写入字节流的整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeInt(value: number): void;
/**
* Write a 16-bit integer into the byte stream. The low 16 bits of the parameter are used. The high 16 bits are ignored.
* @param value A 32-bit integer. Its low 16 bits will be written into the byte stream
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个 16 位整数。使用参数的低 16 位。忽略高 16 位
* @param value 32 位整数,该整数的低 16 位将被写入字节流
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeShort(value: number): void;
/**
* Write a 32-bit unsigned integer into the byte stream
* @param value An unsigned integer to be written into the byte stream
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个无符号的 32 位整数
* @param value 要写入字节流的无符号整数
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeUnsignedInt(value: number): void;
/**
* Write a 16-bit unsigned integer into the byte stream
* @param value An unsigned integer to be written into the byte stream
* @version Egret 2.5
* @platform Web,Native
* @language en_US
*/
/**
* 在字节流中写入一个无符号的 16 位整数
* @param value 要写入字节流的无符号整数
* @version Egret 2.5
* @platform Web,Native
* @language zh_CN
*/
writeUnsignedShort(value: number): void;
/**
* Write a UTF-8 string into the byte stream. The length of the UTF-8 string in bytes is written first, as a 16-bit integer, followed by the bytes representing the characters of the string
* @param value Character string value to be written
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 将 UTF-8 字符串写入字节流。先写入以字节表示的 UTF-8 字符串长度(作为 16 位整数),然后写入表示字符串字符的字节
* @param value 要写入的字符串值
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeUTF(value: string): void;
/**
* Write a UTF-8 string into the byte stream. Similar to the writeUTF() method, but the writeUTFBytes() method does not prefix the string with a 16-bit length word
* @param value Character string value to be written
* @version Egret 2.4
* @platform Web,Native
* @language en_US
*/
/**
* 将 UTF-8 字符串写入字节流。类似于 writeUTF() 方法,但 writeUTFBytes() 不使用 16 位长度的词为字符串添加前缀
* @param value 要写入的字符串值
* @version Egret 2.4
* @platform Web,Native
* @language zh_CN
*/
writeUTFBytes(value: string): void;
/**
*
* @returns
* @version Egret 2.4
* @platform Web,Native
*/
toString(): string;
/**
* @private
* 将 Uint8Array 写入字节流
* @param bytes 要写入的Uint8Array
* @param validateBuffer
*/
_writeUint8Array(bytes: Uint8Array | ArrayLike<number>, validateBuffer?: boolean): void;
/**
* @param len
* @returns
* @version Egret 2.4
* @platform Web,Native
* @private
*/
validate(len: number): boolean;
/**********************/
/**********************/
/**
* @private
* @param len
* @param needReplace
*/
protected validateBuffer(len: number): void;
/**
* @private
* UTF-8 Encoding/Decoding
*/
private encodeUTF8;
/**
* @private
*
* @param data
* @returns
*/
private decodeUTF8;
/**
* @private
*
* @param code_point
*/
private encoderError;
/**
* @private
*
* @param fatal
* @param opt_code_point
* @returns
*/
private decoderError;
/**
* @private
*/
private EOF_byte;
/**
* @private
*/
private EOF_code_point;
/**
* @private
*
* @param a
* @param min
* @param max
*/
private inRange;
/**
* @private
*
* @param n
* @param d
*/
private div;
/**
* @private
*
* @param string
*/
private stringToCodePoints;
}
/**
 * A single message decoded from the Pinus wire format.
 */
interface IPinusDecodeMessage {
    // Request id the message belongs to (presumably 0 for server pushes — verify in implementation)
    id: number;
    /**
     * Message.TYPE_xxx
     */
    type: number;
    // Route identifier of the message
    route: string;
    // Decoded message payload
    body: any;
}
import { ByteArray } from "./ByteArray";
/**
 * Encoder/decoder contract for the Pinus package layer
 * (handshake, heartbeat, data and kick packages — see Package.TYPE_xxx).
 */
interface IPackage {
    encode(type: number, body?: ByteArray): ByteArray;
    decode(buffer: ByteArray): any;
}
class Package implements IPackage {
    // Package type identifiers used as the 'type' argument of encode()
    // and returned by decode().
    static TYPE_HANDSHAKE: number;
    static TYPE_HANDSHAKE_ACK: number;
    static TYPE_HEARTBEAT: number;
    static TYPE_DATA: number;
    static TYPE_KICK: number;
    // Serializes a package of the given type (optionally with a body) into a ByteArray.
    encode(type: number, body?: ByteArray): ByteArray;
    // Deserializes a single package from the buffer.
    decode(
        buffer: ByteArray
    ): {
        type: number;
        body: ByteArray;
        length: number;
    };
}
/**
 * Proto definitions exchanged during the Pinus handshake.
 */
interface IPinusProtos {
    /** Defaults to 0 */
    version: any;
    // Client-side proto definitions
    client: any;
    // Server-side proto definitions
    server: any;
}
/**
 * Payload of the Pinus handshake response (system and user sections).
 */
interface IPinusHandshake {
    sys: any;
    user: any;
}
/** Callback invoked with the user section of the handshake response. */
type IPinusHandshakeCb = (userData: any) => void;
/**
 * Protocol handler implementing enet.IProtoHandler on top of the
 * Pinus package/message wire format (handshake, heartbeat, routed messages).
 */
class PinusProtoHandler implements enet.IProtoHandler {
    private _pkgUtil;
    private _msgUtil;
    private _protoVersion;
    // Presumably maps outstanding request ids to their route strings — verify in implementation
    private _reqIdRouteMap;
    // Handshake response codes
    private RES_OK;
    private RES_FAIL;
    private RES_OLD_CLIENT;
    // Last handshake response (exposed via the handShakeRes getter)
    private _handShakeRes;
    private JS_WS_CLIENT_TYPE;
    private JS_WS_CLIENT_VERSION;
    private _handshakeBuffer;
    constructor();
    private _heartbeatConfig;
    get heartbeatConfig(): enet.IHeartBeatConfig;
    get handShakeRes(): any;
    /**
     * Initialize
     * @param protos proto definitions (client/server)
     * @param useProtobuf whether message bodies are protobuf-encoded
     */
    init(protos: IPinusProtos, useProtobuf?: boolean): void;
    private handshakeInit;
    protoKey2Key(protoKey: any): string;
    encodePkg<T>(pkg: enet.IPackage<T>, useCrypto?: boolean): enet.NetData;
    encodeMsg<T>(msg: enet.IMessage<T, any>, useCrypto?: boolean): enet.NetData;
    decodePkg<T>(data: enet.NetData): enet.IDecodePackage<T>;
}
/**
 * Protobuf encoder/decoder used for Pinus message bodies.
 * Routes are resolved against the client/server proto definitions
 * supplied via init().
 */
class Protobuf {
    static TYPES: any;
    private static _clients;
    private static _servers;
    static init(protos: any): void;
    static encode(route: string, msg: any): ByteArray;
    static decode(route: string, buffer: ByteArray): any;
    private static encodeProtos;
    static decodeProtos(protos: any, buffer: ByteArray): any;
    static encodeTag(type: number, tag: number): ByteArray;
    static getHead(buffer: ByteArray): any;
    static encodeProp(value: any, type: string, protos: any, buffer: ByteArray): void;
    static decodeProp(type: string, protos: any, buffer: ByteArray): any;
    static isSimpleType(type: string): boolean;
    static encodeArray(array: Array<any>, proto: any, protos: any, buffer: ByteArray): void;
    static decodeArray(array: Array<any>, type: string, protos: any, buffer: ByteArray): void;
    // Unsigned / zig-zag signed 32-bit integer (de)serialization helpers
    static encodeUInt32(n: number): ByteArray;
    static decodeUInt32(buffer: ByteArray): number;
    static encodeSInt32(n: number): ByteArray;
    static decodeSInt32(buffer: ByteArray): number;
}
import { ByteArray } from "./ByteArray";
/**
 * String <-> ByteArray conversion helpers for the Pinus protocol.
 */
class Protocol {
    static strencode(str: string): ByteArray;
    static strdecode(byte: ByteArray): string;
}
/**
 * Bidirectional route dictionary mapping route names to numeric ids
 * (used for route compression on the wire).
 */
class Routedic {
    private static _ids;
    private static _names;
    static init(dict: any): void;
    static getID(name: string): any;
    static getName(id: number): any;
}
}
import { copy, defineConfig, normalizeArray } from '@agile-ts/utils';
import { logCodeManager } from '../../logCodeManager';
import {
CreateStatePersistentConfigInterface,
EnhancedState,
StateIngestConfigInterface,
StateObserver,
StateObserversInterface,
} from '../../state';
import { Collection, DefaultItem, ItemKey } from '../collection';
import { GroupIngestConfigInterface, GroupObserver } from './group.observer';
import { ComputedTracker } from '../../computed';
import { Item } from '../item';
import { CollectionPersistent } from '../collection.persistent';
export class Group<
  DataType extends DefaultItem = DefaultItem
> extends EnhancedState<Array<ItemKey>> {
  // Collection the Group belongs to
  collection: () => Collection<DataType>;

  // Key of the side effect that rebuilds the Group output
  // whenever the Group value (itemKeys) changes
  static rebuildGroupSideEffectKey = 'rebuildGroup';

  // Item values represented by the Group
  public _output: Array<DataType> = [];
  // Next output of the Group (which can be used for dynamic Group updates)
  public nextGroupOutput: Array<DataType> = [];
  // Precise itemKeys of the Group only include itemKeys
  // that actually exist in the corresponding Collection
  public _preciseItemKeys: Array<ItemKey> = [];
  // Manages dependencies to other States and subscriptions of UI-Components.
  // It also serves as an interface to the runtime.
  public observers: GroupObservers<ItemKey[], DataType> = {} as any;
  // Keeps track of all Item identifiers for Items that couldn't be found in the Collection
  public notFoundItemKeys: Array<ItemKey> = [];
  // Whether the initial value was loaded from the corresponding Persistent
  // https://github.com/agile-ts/agile/issues/155
  public loadedInitialValue = true;

  /**
   * An extension of the State Class that categorizes and preserves the ordering of structured data.
   * It allows us to cluster together data from a Collection as an array of Item keys.
   *
   * Note that a Group doesn't store the actual Items. It only keeps track of the Item keys
   * and retrieves the fitting Items when needed.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/)
   *
   * @public
   * @param collection - Collection to which the Group belongs.
   * @param initialItems - Key/Name identifiers of the Items to be clustered by the Group.
   * @param config - Configuration object
   */
  constructor(
    collection: Collection<DataType>,
    initialItems: Array<ItemKey> = [],
    config: GroupConfigInterface = {}
  ) {
    super(collection.agileInstance(), initialItems, config);

    // Have to redefine the value Observer (observers['value']) again,
    // although it was technically set in the State Parent
    // https://github.com/microsoft/TypeScript/issues/1617
    this.observers['value'] = new StateObserver<ItemKey[]>(this, {
      key: config.key,
    });
    this.observers['output'] = new GroupObserver(this, {
      key: config.key,
    });
    this.collection = () => collection;

    // Add side effect to Group
    // that rebuilds the Group whenever the Group value changes
    this.addSideEffect(Group.rebuildGroupSideEffectKey, (state, config) => {
      this.rebuild(config?.any?.trackedChanges || [], config);
    });

    // Initial rebuild
    this.rebuild();
  }

  /**
   * Returns the values of the Items clustered by the Group.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/properties#output)
   *
   * @public
   */
  public get output(): Array<DataType> {
    ComputedTracker.tracked(this.observers['output']);
    return copy(this._output);
  }

  public set output(value: DataType[]) {
    // The output is derived from the Collection and must not be set directly
    logCodeManager.log('1C:03:00', { replacers: [this._key] });
  }

  /**
   * Returns a boolean indicating whether an Item with the specified `itemKey`
   * is clustered in the Group or not.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/methods/#has)
   *
   * @public
   * @param itemKey - Key/Name identifier of the Item.
   */
  public has(itemKey: ItemKey) {
    return this.value.includes(itemKey);
  }

  /**
   * Returns the count of Items clustered by the Group.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/properties#size)
   *
   * @public
   */
  public get size(): number {
    return this.value.length;
  }

  /**
   * Removes an Item with the specified key/name identifier from the Group,
   * if it exists in the Group.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/methods#remove)
   *
   * @public
   * @param itemKeys - Key/Name identifier/s of the Item/s to be removed.
   * @param config - Configuration object
   */
  public remove(
    itemKeys: ItemKey | ItemKey[],
    config: GroupRemoveConfigInterface = {}
  ): this {
    const _itemKeys = normalizeArray<ItemKey>(itemKeys);
    const notExistingItemKeysInCollection: Array<ItemKey> = [];
    const notExistingItemKeys: Array<ItemKey> = [];
    let newGroupValue = copy(this.nextStateValue);

    // Need to temporary update the preciseItemKeys
    // since in the rebuild one action (trackedChanges) is performed after the other
    // which requires a dynamic updated index
    const updatedPreciseItemKeys = copy(this._preciseItemKeys);

    config = defineConfig(config, {
      softRebuild: true,
      any: {},
    });
    config.any['trackedChanges'] = []; // TODO should be improved since the 'any' property is very vague

    // Remove itemKeys from Group
    _itemKeys.forEach((itemKey) => {
      const exists = newGroupValue.includes(itemKey);

      // Check if itemKey exists in Group
      if (!exists) {
        notExistingItemKeys.push(itemKey);
        notExistingItemKeysInCollection.push(itemKey);
        return;
      }

      // Track changes to soft rebuild the Group when rebuilding the Group in a side effect
      if (config.softRebuild) {
        const index = updatedPreciseItemKeys.findIndex((ik) => ik === itemKey);
        if (index !== -1) {
          updatedPreciseItemKeys.splice(index, 1);
          config.any['trackedChanges'].push({
            index,
            method: TrackedChangeMethod.REMOVE,
            key: itemKey,
          });
        }
      }

      // Check if itemKey exists in Collection
      if (!this.collection().getItem(itemKey))
        notExistingItemKeysInCollection.push(itemKey);

      // Remove itemKey from Group
      newGroupValue = newGroupValue.filter((key) => key !== itemKey);
    });

    // Return if none of the specified itemKeys exists
    if (notExistingItemKeys.length >= _itemKeys.length) return this;

    // If all removed itemKeys don't exist in the Collection
    // -> no rerender necessary since the output won't change
    if (notExistingItemKeysInCollection.length >= _itemKeys.length)
      config.background = true;

    this.set(newGroupValue, config);
    return this;
  }

  /**
   * Appends new Item/s to the end of the Group.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/methods#add)
   *
   * @public
   * @param itemKeys - Key/Name identifier/s of Item/s to be added.
   * @param config - Configuration object
   */
  public add(
    itemKeys: ItemKey | ItemKey[],
    config: GroupAddConfigInterface = {}
  ): this {
    const _itemKeys = normalizeArray<ItemKey>(itemKeys);
    const notExistingItemKeysInCollection: Array<ItemKey> = [];
    const existingItemKeys: Array<ItemKey> = [];
    const newGroupValue = copy(this.nextStateValue);

    // Need to temporary update the preciseItemKeys
    // since in the rebuild one action (trackedChanges) is performed after the other
    // which requires a dynamic updated index
    const updatedPreciseItemKeys = copy(this._preciseItemKeys);

    config = defineConfig(config, {
      method: 'push',
      softRebuild: true,
      any: {},
    });
    config.any['trackedChanges'] = []; // TODO should be improved since the 'any' property is very vague

    // Add itemKeys to Group
    _itemKeys.forEach((itemKey) => {
      const exists = newGroupValue.includes(itemKey);

      // Check if itemKey exists in Collection
      if (!this.collection().getItem(itemKey))
        notExistingItemKeysInCollection.push(itemKey);

      // Handle existing Item
      if (exists) {
        existingItemKeys.push(itemKey);
        return;
      }

      // Track changes to soft rebuild the Group when rebuilding the Group in a side effect
      if (config.softRebuild) {
        const index =
          config.method === 'push' ? updatedPreciseItemKeys.length : 0;
        // Keep the temporary precise keys in sync with the applied method
        // ('unshift' prepends); previously the key was always appended,
        // leaving the tracked order inconsistent for 'unshift'.
        if (config.method === 'unshift') updatedPreciseItemKeys.unshift(itemKey);
        else updatedPreciseItemKeys.push(itemKey);
        config.any['trackedChanges'].push({
          index,
          method: TrackedChangeMethod.ADD,
          key: itemKey,
        });
      }

      // Add new itemKey to Group
      newGroupValue[config.method || 'push'](itemKey);
    });

    // Return if all specified itemKeys already exist
    if (existingItemKeys.length >= _itemKeys.length) return this;

    // If all added itemKeys don't exist in the Collection
    // -> no rerender necessary since the output won't change
    if (
      notExistingItemKeysInCollection.concat(existingItemKeys).length >=
      _itemKeys.length
    )
      config.background = true;

    this.set(newGroupValue, config);
    return this;
  }

  /**
   * Replaces the old `itemKey` with a new specified `itemKey`.
   *
   * Does nothing if the old `itemKey` isn't present in the Group.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/methods#replace)
   *
   * @public
   * @param oldItemKey - Old `itemKey` to be replaced.
   * @param newItemKey - New `itemKey` to replace the before specified old `itemKey`.
   * @param config - Configuration object
   */
  public replace(
    oldItemKey: ItemKey,
    newItemKey: ItemKey,
    config: StateIngestConfigInterface = {}
  ): this {
    const newGroupValue = copy(this._value);
    const oldItemKeyIndex = newGroupValue.indexOf(oldItemKey);
    // Guard against a missing 'oldItemKey':
    // splice(-1, 1, ..) would otherwise replace the *last* itemKey in the Group
    if (oldItemKeyIndex === -1) return this;
    newGroupValue.splice(oldItemKeyIndex, 1, newItemKey);
    this.set(newGroupValue, config);
    return this;
  }

  /**
   * Retrieves all existing Items of the Group from the corresponding Collection and returns them.
   * Items that aren't present in the Collection are skipped.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/methods#getitems)
   *
   * @public
   */
  public getItems(): Array<Item<DataType>> {
    return this.value
      .map((itemKey) => this.collection().getItem(itemKey))
      .filter((item): item is Item<DataType> => item !== undefined);
  }

  /**
   * Preserves the Group `value` in the corresponding external Storage.
   *
   * The Group key/name is used as the unique identifier for the Persistent.
   * If that is not desired or the Group has no unique identifier,
   * please specify a separate unique identifier for the Persistent.
   *
   * [Learn more..](https://agile-ts.org/docs/core/state/methods/#persist)
   *
   * @public
   * @param config - Configuration object
   */
  public persist(config: GroupPersistConfigInterface = {}): this {
    config = defineConfig(config, {
      key: this._key,
      followCollectionPersistKeyPattern: true,
    });

    // Create storageItemKey based on Collection key/name identifier
    if (config.followCollectionPersistKeyPattern) {
      config.key = CollectionPersistent.getGroupStorageKey(
        config.key || this._key,
        this.collection()._key
      );
    }

    // Persist Group
    super.persist(config);
    return this;
  }

  /**
   * Rebuilds the output of the Group
   * and ingests it into the runtime.
   *
   * In doing so, it traverses the Group `value` (Item identifiers)
   * and fetches the fitting Items accordingly.
   *
   * [Learn more..](https://agile-ts.org/docs/core/collection/group/methods#rebuild)
   *
   * @internal
   * @param trackedChanges - Changes that were tracked between two rebuilds.
   * @param config - Configuration object
   */
  public rebuild(
    trackedChanges: TrackedChangeInterface[] = [],
    config: GroupIngestConfigInterface = {}
  ): this {
    // Don't rebuild Group if Collection isn't correctly instantiated yet
    // (because only after a successful instantiation the Collection
    // contains the Items which are essential for a proper rebuild)
    if (!this.collection().isInstantiated) return this;

    // Item keys that couldn't be found in the Collection
    const notFoundItemKeys: Array<ItemKey> = [];

    // Soft rebuild the Collection (-> rebuild only parts of the Collection)
    if (trackedChanges.length > 0) {
      trackedChanges.forEach((change) => {
        const item = this.collection().getItem(change.key);

        switch (change.method) {
          case TrackedChangeMethod.ADD:
            // this._value.splice(change.index, 0, change.key); // Already updated in 'add' method
            if (item != null) {
              this._preciseItemKeys.splice(change.index, 0, change.key);
              this.nextGroupOutput.splice(change.index, 0, copy(item._value));
            } else {
              notFoundItemKeys.push(change.key);
            }
            break;
          case TrackedChangeMethod.UPDATE:
            if (item != null) {
              this.nextGroupOutput[change.index] = copy(item._value);
            } else {
              notFoundItemKeys.push(change.key);
            }
            break;
          case TrackedChangeMethod.REMOVE:
            // this._value.splice(change.index, 1); // Already updated in 'remove' method
            this._preciseItemKeys.splice(change.index, 1);
            this.nextGroupOutput.splice(change.index, 1);
            break;
          default:
            break;
        }
      });
      this.observers['output'].ingest(config);
    }
    // Hard rebuild the whole Collection
    else {
      const groupItemValues: Array<DataType> = [];

      // Reset precise itemKeys array to rebuild it from scratch
      this._preciseItemKeys = [];

      // Fetch Items from Collection
      this._value.forEach((itemKey) => {
        const item = this.collection().getItem(itemKey);
        if (item != null) {
          groupItemValues.push(item._value);
          this._preciseItemKeys.push(itemKey);
        } else notFoundItemKeys.push(itemKey);
      });

      // Ingest rebuilt Group output into the Runtime
      this.observers['output'].ingestOutput(groupItemValues, config);
    }

    // Logging
    if (notFoundItemKeys.length > 0 && this.loadedInitialValue) {
      logCodeManager.log(
        '1C:02:00',
        { replacers: [this.collection()._key, this._key] },
        notFoundItemKeys
      );
    }

    this.notFoundItemKeys = notFoundItemKeys;
    return this;
  }
}
// Allowed key/name identifier types of a Group
export type GroupKey = string | number;

export interface GroupObservers<ValueType = any, DataType = any>
  extends StateObserversInterface<ValueType> {
  /**
   * Observer responsible for the output of the Group.
   */
  output: GroupObserver<DataType>;
}
/**
 * Configuration object of `Group.add()`.
 */
export interface GroupAddConfigInterface extends StateIngestConfigInterface {
  /**
   * In which way the `itemKey` should be added to the Group.
   * - 'push' = at the end
   * - 'unshift' = at the beginning
   * https://www.tutorialspoint.com/what-are-the-differences-between-unshift-and-push-methods-in-javascript
   * @default 'push'
   */
  method?: 'unshift' | 'push';
  /**
   * Whether to soft rebuild the Group.
   * -> only rebuild the parts of the Group that have actually changed
   * instead of rebuilding the whole Group.
   * @default true
   */
  softRebuild?: boolean;
}
/**
 * Configuration object of `Group.remove()`.
 */
export interface GroupRemoveConfigInterface extends StateIngestConfigInterface {
  /**
   * Whether to soft rebuild the Group.
   * -> only rebuild the parts of the Group that have actually changed
   * instead of rebuilding the whole Group.
   * @default true
   */
  softRebuild?: boolean;
}
/**
 * Configuration object of the Group constructor.
 */
export interface GroupConfigInterface {
  /**
   * Key/Name identifier of the Group.
   * @default undefined
   */
  key?: GroupKey;
  /**
   * Whether the Group should be a placeholder
   * and therefore should only exist in the background.
   * @default false
   */
  isPlaceholder?: boolean;
}
/**
 * Configuration object of `Group.persist()`.
 */
export interface GroupPersistConfigInterface
  extends CreateStatePersistentConfigInterface {
  /**
   * Whether to format the specified Storage key following the Collection Group Storage key pattern.
   * `_${collectionKey}_group_${groupKey}`
   * @default true
   */
  followCollectionPersistKeyPattern?: boolean;
}
// Kind of a change tracked between two Group rebuilds
// (numeric enum values are part of the public contract — do not reorder)
export enum TrackedChangeMethod {
  // Item was added to the Group
  ADD,
  // Item was removed from the Group
  REMOVE,
  // Item value was updated in place
  UPDATE,
}
export interface TrackedChangeInterface {
/**
* What type of change the tracked change is.
* @default undefined
*/
method: TrackedChangeMethod;
/**
* Item key of the tracked change.
* @default undefined
*/
key: ItemKey;
/**
* Current index in the Group value of the tracked change.
* @default undefined
*/
index: number;
}
import { solveRoute } from "../src/solveRoute";
import * as fetchMock from "fetch-mock";
import { Solve, SolveNoDirections, SolveWebMercator } from "./mocks/responses";
import { IPoint, ILocation, IFeatureSet } from "@esri/arcgis-rest-types";
// The same two test stops expressed in every input shape solveRoute accepts.
// Serialized on the wire as: -117.195677,34.056383;-117.918976,33.812092
const stops: Array<[number, number]> = [
  [-117.195677, 34.056383],
  [-117.918976, 33.812092],
];
// With z values, serialized as: -117.195677,34.056383,10.11;-117.918976,33.812092,8.43
const stops3: Array<[number, number, number]> = [
  [-117.195677, 34.056383, 10.11],
  [-117.918976, 33.812092, 8.43],
];
// ILocation shape using the short 'lat'/'long' keys
const stopsObjectsLatLong: ILocation[] = [
  {
    lat: 34.056383,
    long: -117.195677,
  },
  {
    lat: 33.812092,
    long: -117.918976,
  },
];
const stopsObjectsLatLong3: ILocation[] = [
  {
    lat: 34.056383,
    long: -117.195677,
    z: 10.11,
  },
  {
    lat: 33.812092,
    long: -117.918976,
    z: 8.43,
  },
];
// ILocation shape using the long 'latitude'/'longitude' keys
const stopsObjectsLatitudeLongitude: ILocation[] = [
  {
    latitude: 34.056383,
    longitude: -117.195677,
  },
  {
    latitude: 33.812092,
    longitude: -117.918976,
  },
];
const stopsObjectsLatitudeLongitude3: ILocation[] = [
  {
    latitude: 34.056383,
    longitude: -117.195677,
    z: 10.11,
  },
  {
    latitude: 33.812092,
    longitude: -117.918976,
    z: 8.43,
  },
];
// IPoint shape (x = longitude, y = latitude)
const stopsObjectsPoint: IPoint[] = [
  {
    x: -117.195677,
    y: 34.056383,
  },
  {
    x: -117.918976,
    y: 33.812092,
  },
];
const stopsObjectsPoint3: IPoint[] = [
  {
    x: -117.195677,
    y: 34.056383,
    z: 10.11,
  },
  {
    x: -117.918976,
    y: 33.812092,
    z: 8.43,
  },
];
// FeatureSet shape — serialized as JSON rather than the compact coordinate string
const stopsFeatureSet: IFeatureSet = {
  features: [
    {
      geometry: {
        x: -117.195677,
        y: 34.056383,
        spatialReference: {
          wkid: 4326,
        },
      } as IPoint,
      attributes: {},
    },
    {
      geometry: {
        x: -117.918976,
        y: 33.812092,
        spatialReference: {
          wkid: 4326,
        },
      } as IPoint,
      attributes: {},
    },
  ],
};
// const customRoutingUrl =
// "https://foo.com/ArcGIS/rest/services/Network/USA/NAServer/";
describe("solveRoute", () => {
afterEach(fetchMock.restore);
it("should throw an error when a solveRoute request is made without a token", (done) => {
fetchMock.once("*", Solve);
solveRoute({
stops,
})
// tslint:disable-next-line
.catch((e) => {
expect(e).toEqual(
"Routing using the ArcGIS service requires authentication"
);
done();
});
});
it("should make a simple solveRoute request (array of stops)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({ stops, authentication: MOCK_AUTH })
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383;-117.918976,33.812092"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383;-117.918976,33.812092"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of 3d stops)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({ stops: stops3, authentication: MOCK_AUTH })
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of objects - lat/lon)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsLatLong,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383;-117.918976,33.812092"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383;-117.918976,33.812092"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of objects - 3d lat/lon)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsLatLong3,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of objects - latitude/longitude)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsLatitudeLongitude,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383;-117.918976,33.812092"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383;-117.918976,33.812092"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of objects - 3d latitude/longitude)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsLatitudeLongitude3,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of objects - latitude/longitude)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsLatitudeLongitude,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383;-117.918976,33.812092"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383;-117.918976,33.812092"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of objects - 3d latitude/longitude)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsLatitudeLongitude3,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of IPoint)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsPoint,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383;-117.918976,33.812092"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383;-117.918976,33.812092"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (array of 3d IPoint)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsPoint3,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
// "stops=-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
expect(options.body).toContain(
`stops=${encodeURIComponent(
"-117.195677,34.056383,10.11;-117.918976,33.812092,8.43"
)}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should make a simple solveRoute request (FeatureSet)", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsFeatureSet,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
const [url, options]: [string, RequestInit] = fetchMock.lastCall("*");
expect(url).toEqual(
"https://route.arcgis.com/arcgis/rest/services/World/Route/NAServer/Route_World/solve"
);
expect(options.method).toBe("POST");
expect(options.body).toContain("f=json");
expect(options.body).toContain(
`stops=${encodeURIComponent(JSON.stringify(stopsFeatureSet))}`
);
expect(options.body).toContain("token=token");
expect(response.routes.spatialReference.latestWkid).toEqual(4326);
expect(response.routes.features[0].attributes.Name).toEqual(
"Location 1 - Location 2"
);
done();
})
.catch((e) => {
fail(e);
});
});
it("should transform compressed geometry into geometry", (done) => {
fetchMock.once("*", Solve);
const MOCK_AUTH = {
getToken() {
return Promise.resolve("token");
},
portal: "https://mapsdev.arcgis.com",
};
solveRoute({
stops: stopsObjectsPoint,
authentication: MOCK_AUTH,
})
.then((response) => {
expect(fetchMock.called()).toEqual(true);
expect(response.directions[0].features[0].geometry).toEqual(
jasmine.any(Object)
);
done();
})
.catch((e) => {
fail(e);
});
});
// When the caller opts out of directions (returnDirections: false), the
// response post-processing must tolerate a missing `directions` array.
it("should not fail when no directions are returned", (done) => {
  fetchMock.once("*", SolveNoDirections);
  const MOCK_AUTH = {
    getToken() {
      return Promise.resolve("token");
    },
    portal: "https://mapsdev.arcgis.com",
  };
  solveRoute({
    stops: stopsObjectsPoint,
    authentication: MOCK_AUTH,
    params: {
      returnDirections: false,
    },
  })
    .then((response) => {
      expect(fetchMock.called()).toEqual(true);
      // No directions requested, so none should be synthesized either.
      expect(response.directions).toEqual(undefined);
      done();
    })
    .catch((e) => {
      fail(e);
    });
});
// For WGS84 (wkid 4326) responses the routes FeatureSet should be augmented
// with a parallel GeoJSON representation under `routes.geoJson`.
it("should include routes.geoJson in the return", (done) => {
  fetchMock.once("*", Solve);
  const MOCK_AUTH = {
    getToken() {
      return Promise.resolve("token");
    },
    portal: "https://mapsdev.arcgis.com",
  };
  solveRoute({
    stops: stopsObjectsPoint,
    authentication: MOCK_AUTH,
  })
    .then((response) => {
      expect(fetchMock.called()).toEqual(true);
      // (The unused `fetchMock.lastCall` destructure was removed; this test
      // only inspects the parsed response, not the outgoing request.)
      expect(Object.keys(response.routes)).toContain("geoJson");
      done();
    })
    .catch((e) => {
      fail(e);
    });
});
// GeoJSON is only meaningful in WGS84; requesting Web Mercator output
// (wkid 102100) must suppress the `routes.geoJson` augmentation.
it("should not include routes.geoJson in the return for non-4326", (done) => {
  fetchMock.once("*", SolveWebMercator);
  const MOCK_AUTH = {
    getToken() {
      return Promise.resolve("token");
    },
    portal: "https://mapsdev.arcgis.com",
  };
  solveRoute({
    stops: stopsObjectsPoint,
    authentication: MOCK_AUTH,
    params: {
      outSR: 102100,
    },
  })
    .then((response) => {
      expect(fetchMock.called()).toEqual(true);
      // (The unused `fetchMock.lastCall` destructure was removed; this test
      // only inspects the parsed response, not the outgoing request.)
      expect(Object.keys(response.routes)).not.toContain("geoJson");
      done();
    })
    .catch((e) => {
      fail(e);
    });
});
}); | the_stack |
import { Marketplace as MarketplaceContract } from "contracts";
import { ContractMetadata } from "../core/classes/contract-metadata";
import { ContractRoles } from "../core/classes/contract-roles";
import { ContractEncoder } from "../core/classes/contract-encoder";
import { IStorage } from "../core/interfaces/IStorage";
import { NetworkOrSignerOrProvider, TransactionResult } from "../core/types";
import { SDKOptions } from "../schema/sdk-options";
import { ContractWrapper } from "../core/classes/contract-wrapper";
import { UpdateableNetwork } from "../core/interfaces/contract";
import { MarketplaceContractSchema } from "../schema/contracts/marketplace";
import { AuctionListing, DirectListing } from "../types/marketplace";
import { ListingType } from "../enums";
import { BigNumber, BigNumberish, constants } from "ethers";
import invariant from "tiny-invariant";
import { ListingNotFoundError } from "../common";
import { MarketplaceFilter } from "../types/marketplace/MarketPlaceFilter";
import { getRoleHash } from "../common/role";
import { MarketplaceDirect } from "../core/classes/marketplace-direct";
import { MarketplaceAuction } from "../core/classes/marketplace-auction";
import { DEFAULT_QUERY_ALL_COUNT } from "../types/QueryParams";
import { GasCostEstimator } from "../core/classes/gas-cost-estimator";
import { ContractInterceptor } from "../core/classes/contract-interceptor";
import { ContractEvents } from "../core/classes/contract-events";
import { ContractPlatformFee } from "../core/classes/contract-platform-fee";
import { ContractAnalytics } from "../core/classes/contract-analytics";
/**
* Create your own whitelabel marketplace that enables users to buy and sell any digital assets.
*
* @example
*
* ```javascript
* import { ThirdwebSDK } from "@thirdweb-dev/sdk";
*
* const sdk = new ThirdwebSDK("rinkeby");
* const contract = sdk.getMarketplace("{{contract_address}}");
* ```
*
* @public
*/
export class Marketplace implements UpdateableNetwork {
static contractType = "marketplace" as const;
static contractRoles = ["admin", "lister", "asset"] as const;
static contractAbi = require("../../abis/Marketplace.json");
/**
* @internal
*/
static schema = MarketplaceContractSchema;
private contractWrapper: ContractWrapper<MarketplaceContract>;
private storage: IStorage;
public encoder: ContractEncoder<MarketplaceContract>;
public events: ContractEvents<MarketplaceContract>;
public estimator: GasCostEstimator<MarketplaceContract>;
public platformFees: ContractPlatformFee<MarketplaceContract>;
/**
* @internal
*/
public analytics: ContractAnalytics<MarketplaceContract>;
public metadata: ContractMetadata<
MarketplaceContract,
typeof Marketplace.schema
>;
public roles: ContractRoles<
MarketplaceContract,
typeof Marketplace.contractRoles[number]
>;
/**
* @internal
*/
public interceptor: ContractInterceptor<MarketplaceContract>;
/**
* Direct listings
* @remarks Create and manage direct listings in your marketplace.
* @example
* ```javascript
* // Data of the listing you want to create
* const listing = {
* // address of the NFT contract the asset you want to list is on
* assetContractAddress: "0x...",
* // token ID of the asset you want to list
* tokenId: "0",
* // when should the listing open up for offers
* startTimestamp: new Date(),
* // how long the listing will be open for
* listingDurationInSeconds: 86400,
* // how many of the asset you want to list
* quantity: 1,
* // address of the currency contract that will be used to pay for the listing
* currencyContractAddress: NATIVE_TOKEN_ADDRESS,
* // how much the asset will be sold for
* buyoutPricePerToken: "1.5",
* }
*
* const tx = await contract.direct.createListing(listing);
* const receipt = tx.receipt; // the transaction receipt
* const listingId = tx.id; // the id of the newly created listing
*
* // And on the buyers side:
* // Quantity of the asset you want to buy
* const quantityDesired = 1;
* await contract.direct.buyoutListing(listingId, quantityDesired);
* ```
*/
public direct: MarketplaceDirect;
/**
* Auctions
* @remarks Create and manage auctions in your marketplace.
* @example
* ```javascript
* // Data of the auction you want to create
* const auction = {
* // address of the contract the asset you want to list is on
* assetContractAddress: "0x...",
* // token ID of the asset you want to list
* tokenId: "0",
* // when should the listing open up for offers
* startTimestamp: new Date(),
* // how long the listing will be open for
* listingDurationInSeconds: 86400,
* // how many of the asset you want to list
* quantity: 1,
* // address of the currency contract that will be used to pay for the listing
* currencyContractAddress: NATIVE_TOKEN_ADDRESS,
* // how much people would have to bid to instantly buy the asset
* buyoutPricePerToken: "10",
* // the minimum bid that will be accepted for the token
* reservePricePerToken: "1.5",
* }
*
* const tx = await contract.auction.createListing(auction);
* const receipt = tx.receipt; // the transaction receipt
* const listingId = tx.id; // the id of the newly created listing
*
* // And on the buyers side:
* // The price you are willing to bid for a single token of the listing
* const pricePerToken = 2.6;
* await contract.auction.makeBid(listingId, pricePerToken);
* ```
*/
public auction: MarketplaceAuction;
constructor(
network: NetworkOrSignerOrProvider,
address: string,
storage: IStorage,
options: SDKOptions = {},
contractWrapper = new ContractWrapper<MarketplaceContract>(
network,
address,
Marketplace.contractAbi,
options,
),
) {
this.contractWrapper = contractWrapper;
this.storage = storage;
this.metadata = new ContractMetadata(
this.contractWrapper,
Marketplace.schema,
this.storage,
);
this.roles = new ContractRoles(
this.contractWrapper,
Marketplace.contractRoles,
);
this.analytics = new ContractAnalytics(this.contractWrapper);
this.encoder = new ContractEncoder(this.contractWrapper);
this.estimator = new GasCostEstimator(this.contractWrapper);
this.direct = new MarketplaceDirect(this.contractWrapper, this.storage);
this.auction = new MarketplaceAuction(this.contractWrapper, this.storage);
this.events = new ContractEvents(this.contractWrapper);
this.platformFees = new ContractPlatformFee(this.contractWrapper);
this.interceptor = new ContractInterceptor(this.contractWrapper);
}
onNetworkUpdated(network: NetworkOrSignerOrProvider) {
this.contractWrapper.updateSignerOrProvider(network);
}
getAddress(): string {
return this.contractWrapper.readContract.address;
}
/** ******************************
* READ FUNCTIONS
*******************************/
/**
* Convenience function to get either a direct or auction listing
*
* @param listingId - the listing id
* @returns either a direct or auction listing
*
* @remarks Get a listing by its listing id
* @example
* ```javascript
* const listingId = 0;
* const listing = await contract.getListing(listingId);
* ```
*/
public async getListing(
listingId: BigNumberish,
): Promise<AuctionListing | DirectListing> {
const listing = await this.contractWrapper.readContract.listings(listingId);
if (listing.assetContract === constants.AddressZero) {
throw new ListingNotFoundError(this.getAddress(), listingId.toString());
}
switch (listing.listingType) {
case ListingType.Auction: {
return await this.auction.mapListing(listing);
}
case ListingType.Direct: {
return await this.direct.mapListing(listing);
}
default: {
throw new Error(`Unknown listing type: ${listing.listingType}`);
}
}
}
/**
* Get all active listings
*
* @remarks Fetch all the active listings from this marketplace contract. An active listing means it can be bought or bid on.
* @example
* ```javascript
* const listings = await contract.getActiveListings();
* const priceOfFirstActiveListing = listings[0].price;
* ```
* @param filter - optional filter parameters
*/
public async getActiveListings(
filter?: MarketplaceFilter,
): Promise<(AuctionListing | DirectListing)[]> {
const rawListings = await this.getAllListingsNoFilter();
const filtered = this.applyFilter(rawListings, filter);
const now = BigNumber.from(Math.floor(Date.now() / 1000));
return filtered.filter((l) => {
return (
(l.type === ListingType.Auction &&
BigNumber.from(l.endTimeInEpochSeconds).gt(now) &&
BigNumber.from(l.startTimeInEpochSeconds).lte(now)) ||
(l.type === ListingType.Direct && l.quantity > 0)
);
});
}
/**
* Get all the listings
*
* @remarks Fetch all the listings from this marketplace contract, including sold ones.
* @example
* ```javascript
* const listings = await contract.getAllListings();
* const priceOfFirstListing = listings[0].price;
* ```
*
* @param filter - optional filter parameters
*/
public async getAllListings(
filter?: MarketplaceFilter,
): Promise<(AuctionListing | DirectListing)[]> {
const rawListings = await this.getAllListingsNoFilter();
return this.applyFilter(rawListings, filter);
}
/**
* @internal
*/
public getAll = this.getAllListings;
/**
* Get the total number of Listings
* @returns the total number listings on the marketplace
* @public
*/
public async getTotalCount(): Promise<BigNumber> {
return await this.contractWrapper.readContract.totalListings();
}
/**
* Get whether listing is restricted only to addresses with the Lister role
*/
public async isRestrictedToListerRoleOnly(): Promise<boolean> {
const anyoneCanList = await this.contractWrapper.readContract.hasRole(
getRoleHash("lister"),
constants.AddressZero,
);
return !anyoneCanList;
}
/**
* Get the buffer in basis points between offers
*/
public async getBidBufferBps(): Promise<BigNumber> {
return this.contractWrapper.readContract.bidBufferBps();
}
/**
* get the buffer time in seconds between offers
*/
public async getTimeBufferInSeconds(): Promise<BigNumber> {
return this.contractWrapper.readContract.timeBuffer();
}
/** ******************************
* WRITE FUNCTIONS
*******************************/
/**
* Purchase NFTs
* @remarks Buy a Direct or Auction listing on your marketplace.
* @example
* ```javascript
* // The listing ID of the asset you want to buy
* const listingId = 0;
* // Quantity of the asset you want to buy
* const quantityDesired = 1;
*
* await contract.buyoutListing(listingId, quantityDesired);
* ```
* @param listingId - the listing ID of the listing you want to buy
* @param quantityDesired - the quantity that you want to buy (for ERC1155 tokens)
* @param receiver - optional receiver of the bought listing if different from the connected wallet (for direct listings only)
*/
public async buyoutListing(
listingId: BigNumberish,
quantityDesired?: BigNumberish,
receiver?: string,
): Promise<TransactionResult> {
const listing = await this.contractWrapper.readContract.listings(listingId);
if (listing.listingId.toString() !== listingId.toString()) {
throw new ListingNotFoundError(this.getAddress(), listingId.toString());
}
switch (listing.listingType) {
case ListingType.Direct: {
invariant(
quantityDesired !== undefined,
"quantityDesired is required when buying out a direct listing",
);
return await this.direct.buyoutListing(
listingId,
quantityDesired,
receiver,
);
}
case ListingType.Auction: {
return await this.auction.buyoutListing(listingId);
}
default:
throw Error(`Unknown listing type: ${listing.listingType}`);
}
}
/**
* Set the Auction bid buffer
* @remarks A percentage (e.g. 5%) in basis points (5% = 500, 100% = 10000). A new bid is considered to be a winning bid only if its bid amount is at least the bid buffer (e.g. 5%) greater than the previous winning bid. This prevents buyers from making very slightly higher bids to win the auctioned items.
* @example
* ```javascript
* // the bid buffer in basis points
* const bufferBps = 5_00; // 5%
* await contract.setBidBufferBps(bufferBps);
* ```
* @param bufferBps - the bps value
*/
public async setBidBufferBps(bufferBps: BigNumberish): Promise<void> {
await this.roles.verify(
["admin"],
await this.contractWrapper.getSignerAddress(),
);
const timeBuffer = await this.getTimeBufferInSeconds();
await this.contractWrapper.sendTransaction("setAuctionBuffers", [
timeBuffer,
BigNumber.from(bufferBps),
]);
}
/**
* Set the Auction Time buffer:
* @remarks Measured in seconds (e.g. 15 minutes or 900 seconds). If a winning bid is made within the buffer of the auction closing (e.g. 15 minutes within the auction closing), the auction's closing time is increased by the buffer to prevent buyers from making last minute winning bids, and to give time to other buyers to make a higher bid if they wish to.
* @example
* ```javascript
* // the time buffer in seconds
* const bufferInSeconds = 60;
* await contract.setTimeBufferInSeconds(bufferInSeconds);
* ```
* @param bufferInSeconds - the seconds value
*/
public async setTimeBufferInSeconds(
bufferInSeconds: BigNumberish,
): Promise<void> {
await this.roles.verify(
["admin"],
await this.contractWrapper.getSignerAddress(),
);
const bidBuffer = await this.getBidBufferBps();
await this.contractWrapper.sendTransaction("setAuctionBuffers", [
BigNumber.from(bufferInSeconds),
bidBuffer,
]);
}
/**
* Restrict listing NFTs only from the specified NFT contract address.
* It is possible to allow listing from multiple contract addresses.
* @param contractAddress - the NFT contract address
*/
public async allowListingFromSpecificAssetOnly(contractAddress: string) {
const encoded = [];
const members = await this.roles.get("asset");
if (members.includes(constants.AddressZero)) {
encoded.push(
this.encoder.encode("revokeRole", [
getRoleHash("asset"),
constants.AddressZero,
]),
);
}
encoded.push(
this.encoder.encode("grantRole", [getRoleHash("asset"), contractAddress]),
);
await this.contractWrapper.multiCall(encoded);
}
/**
* Allow listings from any NFT contract
*/
public async allowListingFromAnyAsset() {
const encoded = [];
const members = await this.roles.get("asset");
for (const addr in members) {
encoded.push(
this.encoder.encode("revokeRole", [getRoleHash("asset"), addr]),
);
}
encoded.push(
this.encoder.encode("grantRole", [
getRoleHash("asset"),
constants.AddressZero,
]),
);
await this.contractWrapper.multiCall(encoded);
}
/** ******************************
* PRIVATE FUNCTIONS
*******************************/
private async getAllListingsNoFilter(): Promise<
(AuctionListing | DirectListing)[]
> {
const listings = await Promise.all(
Array.from(
Array(
(await this.contractWrapper.readContract.totalListings()).toNumber(),
).keys(),
).map(async (i) => {
let listing;
try {
listing = await this.getListing(i);
} catch (err) {
console.warn(`Error fetching listing with id: ${i}`, err);
return undefined;
}
if (listing.type === ListingType.Auction) {
return listing;
}
const valid = await this.direct.isStillValidListing(listing);
if (!valid) {
return undefined;
}
return listing;
}),
);
return listings.filter((l) => l !== undefined) as (
| AuctionListing
| DirectListing
)[];
}
private applyFilter(
listings: (AuctionListing | DirectListing)[],
filter?: MarketplaceFilter,
) {
let rawListings = [...listings];
const start = BigNumber.from(filter?.start || 0).toNumber();
const count = BigNumber.from(
filter?.count || DEFAULT_QUERY_ALL_COUNT,
).toNumber();
if (filter) {
if (filter.seller) {
rawListings = rawListings.filter(
(seller) =>
seller.sellerAddress.toString().toLowerCase() ===
filter?.seller?.toString().toLowerCase(),
);
}
if (filter.tokenContract) {
rawListings = rawListings.filter(
(tokenContract) =>
tokenContract.assetContractAddress.toString().toLowerCase() ===
filter?.tokenContract?.toString().toLowerCase(),
);
}
if (filter.tokenId !== undefined) {
rawListings = rawListings.filter(
(tokenContract) =>
tokenContract.tokenId.toString() === filter?.tokenId?.toString(),
);
}
rawListings = rawListings.filter((_, index) => index >= start);
rawListings = rawListings.slice(0, count);
}
return rawListings;
}
// TODO: Complete method implementation with subgraph
// /**
// * @beta - This method is not yet complete.
// *
// * @param listingId
// * @returns
// */
// public async getActiveOffers(listingId: BigNumberish): Promise<Offer[]> {
// const listing = await this.validateDirectListing(BigNumber.from(listingId));
// const offers = await this.readOnlyContract.offers(listing.id, "");
// return await Promise.all(
// offers.map(async (offer: any) => {
// return await this.mapOffer(BigNumber.from(listingId), offer);
// }),
// );
// }
} | the_stack |
import * as d3 from 'd3';
import { d3GetEvent } from './utils';
import * as _ from 'underscore';
import * as selector from './Selector';
import * as utils from './utils';
import * as sel_utils from './selector_utils';
import { Mark } from './Mark';
// TODO: examine refactoring the two abstract base classes belowing using an
// up-to-date mixin pattern.
// Because we use abstract base classes, the mixins cannot work with current TypeScript:
// https://github.com/microsoft/TypeScript/issues/29653
// Instead of using mixins we use free functions for now
// Build a predicate over arrays of x/y coordinates that marks which points
// fall inside the brushed extent. `x` and `y` are two-element [min, max]
// pixel ranges; an empty range means "no constraint on that axis".
function point_selector(x, y) {
  return function (xar, yar) {
    // Legacy single-point form (no y array): defer to the old helper.
    if (yar === undefined) {
      return sel_utils.point_in_rectangle(xar, x, y);
    }
    const count = Math.min(xar.length, yar.length);
    const mask = new Uint8Array(count);
    const constrainX = x.length !== 0;
    const constrainY = y.length !== 0;
    // One dedicated loop per case keeps the branch out of the hot loop.
    if (constrainX && constrainY) {
      for (let i = 0; i < count; i++) {
        const insideX = x[0] <= xar[i] && xar[i] <= x[1];
        const insideY = y[0] <= yar[i] && yar[i] <= y[1];
        mask[i] = insideX && insideY ? 1 : 0;
      }
    } else if (constrainX) {
      for (let i = 0; i < count; i++) {
        mask[i] = x[0] <= xar[i] && xar[i] <= x[1] ? 1 : 0;
      }
    } else {
      // Only the y range constrains the selection.
      for (let i = 0; i < count; i++) {
        mask[i] = y[0] <= yar[i] && yar[i] <= y[1] ? 1 : 0;
      }
    }
    return mask;
  };
}
// Build a predicate that tests whether a mark's bounding box (given as a
// pair of pixel ranges) intersects the brushed rectangle spanned by x/y.
function rect_selector(x, y) {
  return (xy) => sel_utils.rect_inter_rect(xy[0], xy[1], x, y);
}
// Ascending numeric comparator for Array.prototype.sort (the default sort
// compares lexicographically, which is wrong for pixel coordinates).
function sort(first, second) {
  return first - second;
}
// Propagate the current brush extent to every mark view attached to this
// selector. Called with no extent (or an empty one) to clear the selection.
// For a 1-d brush, extent_y is omitted and the model's orientation decides
// which axis extent_x constrains.
function update_mark_selected(brush, extent_x?, extent_y?) {
  if (extent_x === undefined || extent_x.length === 0) {
    // Empty brush: notify every mark that the selection was reset.
    for (const view of brush.mark_views) {
      view.selector_changed();
    }
    return;
  }
  let x;
  let y;
  if (extent_y === undefined) {
    // 1d brush: the single extent applies to y when vertical, x otherwise.
    const vertical = brush.model.get('orientation') === 'vertical';
    x = vertical ? [] : extent_x;
    y = vertical ? extent_x : [];
  } else {
    // 2d brush: both axes are constrained.
    x = extent_x;
    y = extent_y;
  }
  // Ranges must be ascending for the interval tests in the selectors.
  if (x.length) {
    x.sort(sort);
  }
  if (y.length) {
    y.sort(sort);
  }
  for (const view of brush.mark_views) {
    view.selector_changed(point_selector(x, y), rect_selector(x, y));
  }
}
// Stretch the brush rectangles across the full selector area on the axis
// the brush does not select along: full width for vertical brushes, full
// height for horizontal ones.
function adjust_rectangle(brush) {
  const rects = brush.d3el.selectAll('rect');
  if (brush.model.get('orientation') === 'vertical') {
    rects.attr('x', 0).attr('width', brush.width);
  } else {
    rects.attr('y', 0).attr('height', brush.height);
  }
}
/**
 * Brush behaviour shared by 2-d (X/Y) brush selectors.
 *
 * This duplicates BrushMixinXSelector because TypeScript abstract base
 * classes cannot currently be combined with mixins, so the shared behaviour
 * lives in free functions (update_mark_selected, adjust_rectangle) called
 * from both classes.
 */
abstract class BrushMixinXYSelector extends selector.BaseXYSelector {
  // Initialize brush-gesture state; called from the subclass render().
  brush_render() {
    this.brushing = false;
  }

  // Re-apply the model's color to the visible selection rectangle.
  color_change() {
    if (this.model.get('color') !== null) {
      this.d3el.selectAll('.selection').style('fill', this.model.get('color'));
    }
  }

  // d3 'start' handler: mark that an interactive brush gesture began and
  // sync the 'brushing' flag to the kernel. Guarded so programmatic brush
  // moves (ignoreBrushEvents) do not echo back to the model.
  brush_start() {
    if (this.ignoreBrushEvents) {
      return;
    }
    this.brushing = true;
    this.model.set('brushing', true);
    this.touch();
  }

  // d3 'brush' handler: push the in-progress extent to the model.
  brush_move() {
    if (this.ignoreBrushEvents) {
      return;
    }
    this.convert_and_save();
  }

  // d3 'end' handler: clear the 'brushing' flag and save the final extent.
  // Note brushing is reset only after convert_and_save so selected_changed
  // (triggered by the model update) still sees the gesture as in progress.
  brush_end() {
    if (this.ignoreBrushEvents) {
      return;
    }
    this.model.set('brushing', false);
    this.convert_and_save();
    this.brushing = false;
  }

  adjust_rectangle() {
    adjust_rectangle(this);
  }

  update_mark_selected(extent_x?, extent_y?) {
    return update_mark_selected(this, extent_x, extent_y);
  }

  // Convert the pixel extent to data space and write it to the model.
  abstract convert_and_save(extent?, item?);

  brush: d3.BrushBehavior<any>;
  // True while the user is actively dragging the brush.
  brushing: boolean;
  // TODO: should this be mark_views_promises?
  mark_views: Mark[];
  // Set while the view moves the brush programmatically, to suppress the
  // resulting d3 events.
  ignoreBrushEvents = false;
}
/**
 * Brush behaviour shared by 1-d (single-axis) brush selectors.
 *
 * Mirror of BrushMixinXYSelector with a BaseXSelector parent; see the note
 * above on why the mixin is duplicated rather than shared.
 */
abstract class BrushMixinXSelector extends selector.BaseXSelector {
  // Initialize brush-gesture state; called from the subclass render().
  brush_render() {
    this.brushing = false;
  }

  // Re-apply the model's color to the visible selection rectangle.
  color_change() {
    if (this.model.get('color') !== null) {
      this.d3el.selectAll('.selection').style('fill', this.model.get('color'));
    }
  }

  // d3 'start' handler: mark that an interactive brush gesture began and
  // sync the 'brushing' flag to the kernel. Guarded so programmatic brush
  // moves (ignoreBrushEvents) do not echo back to the model.
  brush_start() {
    if (this.ignoreBrushEvents) {
      return;
    }
    this.brushing = true;
    this.model.set('brushing', true);
    this.touch();
  }

  // d3 'brush' handler: push the in-progress extent to the model.
  brush_move() {
    if (this.ignoreBrushEvents) {
      return;
    }
    this.convert_and_save();
  }

  // d3 'end' handler: clear the 'brushing' flag and save the final extent.
  // brushing is reset only after convert_and_save so selected_changed still
  // sees the gesture as in progress during the model update.
  brush_end() {
    if (this.ignoreBrushEvents) {
      return;
    }
    this.model.set('brushing', false);
    this.convert_and_save();
    this.brushing = false;
  }

  adjust_rectangle() {
    adjust_rectangle(this);
  }

  update_mark_selected(extent_x?, extent_y?) {
    return update_mark_selected(this, extent_x, extent_y);
  }

  // Convert the pixel extent to data space and write it to the model.
  abstract convert_and_save(extent?, item?);

  brush: d3.BrushBehavior<any>;
  // True while the user is actively dragging the brush.
  brushing: boolean;
  // TODO: should this be mark_views_promises?
  mark_views: Mark[];
  // Set while the view moves the brush programmatically, to suppress the
  // resulting d3 events.
  ignoreBrushEvents = false;
}
/**
 * Two-dimensional rectangular brush selector.
 *
 * Keeps a d3.brush in sync with the model's selected_x / selected_y ranges
 * (data space) and pushes pixel extents to attached mark views.
 */
export class BrushSelector extends BrushMixinXYSelector {
  async render() {
    await super.render();
    this.brush_render();
    await this.create_scales();
    await this.mark_views_promise;
    this.brush = d3
      .brush()
      .on('start', _.bind(this.brush_start, this))
      .on('brush', _.bind(this.brush_move, this))
      .on('end', _.bind(this.brush_end, this));
    this.brush.extent([
      [0, 0],
      [this.width, this.height],
    ]);

    this.d3el.attr('class', 'selector brushintsel');
    this.d3el.call(this.brush);
    this.adjust_rectangle();
    this.color_change();
    this.create_listeners();
    // Apply any selection the model already holds.
    this.selected_changed();
  }

  create_listeners() {
    super.create_listeners();
    this.listenTo(this.model, 'change:color', this.color_change);
    // Move these to BaseXYSelector
    this.listenTo(this.model, 'change:selected_x', this.selected_changed);
    this.listenTo(this.model, 'change:selected_y', this.selected_changed);
  }

  // Clear the selection on the marks and in the model.
  empty_selection() {
    this.update_mark_selected();
    this.model.set('selected_x', null);
    this.model.set('selected_y', null);
    this.touch();
  }

  // Convert the d3 pixel selection to data-space ranges and save them to the
  // model. Only reacts to user-initiated events (sourceEvent present), so
  // programmatic brush.move calls do not loop back.
  convert_and_save() {
    const e = d3GetEvent();
    if (!e.sourceEvent) {
      return;
    }
    if (!e.selection) {
      this.empty_selection();
    } else {
      const d0 = e.selection;
      const pixel_extent_x = [d0[0][0], d0[1][0]];
      // y pixels grow downward, so take [bottom, top] before inverting.
      const pixel_extent_y = [d0[1][1], d0[0][1]];
      const extent_x = pixel_extent_x
        .map(this.x_scale.invert.bind(this.x_scale))
        .sort(sort);
      const extent_y = pixel_extent_y
        .map(this.y_scale.invert.bind(this.y_scale))
        .sort(sort);
      this.update_mark_selected(pixel_extent_x, pixel_extent_y);

      this.set_selected(
        'selected_x',
        this.x_scale.model.typedRange(extent_x as number[])
      );
      this.set_selected(
        'selected_y',
        this.y_scale.model.typedRange(extent_y as number[])
      );
      this.touch();
    }
  }

  // Model → view: reposition the brush to match selected_x / selected_y.
  selected_changed() {
    // Ignore model echoes produced while the user is dragging.
    if (this.brushing) {
      return;
    }
    //reposition the interval selector and set the selected attribute.
    const selected_x = this.model.get('selected_x') || [],
      selected_y = this.model.get('selected_y') || [];
    if (selected_x.length === 0 || selected_y.length === 0) {
      this.update_mark_selected();
    } else if (selected_x.length != 2 || selected_y.length != 2) {
      // invalid value for selected. Ignoring the value
      return;
    } else {
      const pixel_extent_x = selected_x
        .map((v) => this.x_scale.offset + this.x_scale.scale(v))
        .sort(sort);
      const pixel_extent_y = selected_y
        .map((v) => this.y_scale.offset + this.y_scale.scale(v))
        .sort(sort);
      this.update_mark_selected(pixel_extent_x, pixel_extent_y);
    }
    this.syncModelToBrush();
  }

  relayout() {
    super.relayout();

    this.d3el
      .select('.background')
      .attr('width', this.width)
      .attr('height', this.height);

    this.set_x_range([this.x_scale]);
    this.set_y_range([this.y_scale]);

    this.brush.extent([
      [0, 0],
      [this.width, this.height],
    ]);
    this.syncModelToBrush();
  }

  private syncModelToBrush() {
    // Move and redraw the brush selector, preventing move events to be triggered
    this.ignoreBrushEvents = true;
    try {
      if (this.model.get('selected_x') && this.model.get('selected_y')) {
        const range_x = this.model
          .get('selected_x')
          .map((v) => this.x_scale.offset + this.x_scale.scale(v))
          .sort(sort);
        const range_y = this.model
          .get('selected_y')
          .map((v) => this.y_scale.offset + this.y_scale.scale(v))
          .sort(sort);
        this.brush.move(this.d3el, [
          [range_x[0], range_y[0]],
          [range_x[1], range_y[1]],
        ]);
      } else {
        this.brush.move(this.d3el, null);
      }
      this.d3el.call(this.brush);
    } finally {
      this.ignoreBrushEvents = false;
    }
    // Fixed: a duplicate `this.d3el.call(this.brush)` used to run here, after
    // the event guard was lifted; it was redundant (the call above already
    // re-applies the brush) and inconsistent with BrushIntervalSelector.
  }

  // TODO: check that we've properly overridden the mixin.
  adjust_rectangle() {}
  reset() {}

  d3el: d3.Selection<SVGGElement, any, any, any>;
}
/**
 * One-dimensional brush selector along a single axis.
 *
 * The model's 'orientation' decides between a horizontal (brushX) and a
 * vertical (brushY) d3 brush; the selected interval is mirrored between the
 * model ('selected', data space) and the on-screen brush (pixel space).
 */
export class BrushIntervalSelector extends BrushMixinXSelector {
  async render() {
    await super.render();
    this.brush_render();
    await this.mark_views_promise;
    await this.create_scales();
    this.brush = (
      this.model.get('orientation') == 'vertical' ? d3.brushY() : d3.brushX()
    )
      .on('start', _.bind(this.brush_start, this))
      .on('brush', _.bind(this.brush_move, this))
      .on('end', _.bind(this.brush_end, this));
    this.brush.extent([
      [0, 0],
      [this.width, this.height],
    ]);

    this.d3el.attr('class', 'selector brushintsel');
    this.d3el.call(this.brush);
    this.adjust_rectangle();
    this.color_change();
    this.create_listeners();
    // Apply any selection the model already holds.
    this.selected_changed();
  }

  create_listeners() {
    super.create_listeners();
    this.listenTo(this.model, 'change:color', this.color_change);
    this.listenTo(this.model, 'change:selected', this.selected_changed);
  }

  // Clear the selection on the marks and in the model.
  empty_selection() {
    this.update_mark_selected();
    this.model.set('selected', null);
    this.touch();
  }

  // Convert the d3 pixel selection to a data-space interval and save it to
  // the model. Only reacts to user-initiated events (sourceEvent present).
  convert_and_save() {
    const e = d3GetEvent();
    if (!e.sourceEvent) {
      return;
    }
    if (!e.selection) {
      this.empty_selection();
    } else {
      const pixel_extent = e.selection;
      const extent = pixel_extent
        .map(this.scale.invert.bind(this.scale))
        .sort(sort);

      this.update_mark_selected(pixel_extent);
      this.set_selected('selected', this.scale.model.typedRange(extent));
      this.touch();
    }
  }

  update_scale_domain(ignore_gui_update) {
    // Call the base class function to update the scale.
    super.update_scale_domain();
    if (ignore_gui_update !== true) {
      this.selected_changed();
    }
  }

  // Model → view: reposition the brush to match the model's 'selected'.
  selected_changed() {
    // Ignore model echoes produced while the user is dragging.
    if (this.brushing) {
      return;
    }
    //reposition the interval selector and set the selected attribute.
    const selected = this.model.get('selected') || [];
    if (selected.length === 0) {
      this.update_mark_selected();
    } else if (selected.length != 2) {
      // invalid value for selected. Ignoring the value
      return;
    } else {
      const extent = [selected[0], selected[1]];
      const pixel_extent = extent.map((v) => this.scale.scale(v)).sort(sort);
      this.update_mark_selected(pixel_extent);
    }
    this.syncModelToBrush();
  }

  relayout() {
    super.relayout();

    this.adjust_rectangle();
    this.d3el
      .select('.background')
      .attr('width', this.width)
      .attr('height', this.height);

    this.set_range([this.scale]);
    this.brush.extent([
      [0, 0],
      [this.width, this.height],
    ]);
    this.syncModelToBrush();
  }

  private syncModelToBrush() {
    // Move and redraw the brush selector, preventing move events to be triggered
    this.ignoreBrushEvents = true;
    try {
      if (this.model.get('selected')) {
        const range = this.model
          .get('selected')
          .map((v) => this.scale.scale(v))
          .sort(sort);
        this.brush.move(this.d3el, range);
      } else {
        this.brush.move(this.d3el, null);
      }
      this.d3el.call(this.brush);
    } finally {
      this.ignoreBrushEvents = false;
    }
  }

  reset() {}

  d3el: d3.Selection<SVGGElement, any, any, any>;
}
// Toggle CSS classes on a d3 selection: every class in remove_classes is
// cleared first, then every class in add_classes is set. Either list may be
// null/undefined to skip that half.
function add_remove_classes(selection, add_classes, remove_classes) {
  (remove_classes || []).forEach((name) => selection.classed(name, false));
  (add_classes || []).forEach((name) => selection.classed(name, true));
}
export class MultiSelector extends BrushMixinXSelector {
render() {
super.render.apply(this);
this.brush_render();
this.names = this.model.get('names');
this.curr_index = 0;
const scale_creation_promise = this.create_scales();
Promise.all([this.mark_views_promise, scale_creation_promise]).then(() => {
this.d3el.attr('class', 'multiselector');
this.d3el.attr('width', this.width);
this.d3el.attr('height', this.height);
this.create_brush();
this.selecting_brush = false;
this.create_listeners();
});
}
create_listeners() {
super.create_listeners();
this.listenTo(this.model, 'change:names', this.labels_change);
this.listenTo(this.model, 'change:color', this.color_change);
}
labels_change(model, value) {
const prev_names = model.previous('names');
this.names = value;
const data = _.range(this.curr_index + 1);
const selected = utils.deepCopy(this.model.get('selected'));
// TODO: Use do diff?
data.forEach((elem) => {
const label = this.get_label(elem);
const prev_label = this.get_label(elem, prev_names);
if (prev_label !== label) {
this.d3el.select('.brush_text_' + elem).text(label);
selected[label] = selected[prev_label];
delete selected[prev_label];
}
});
this.set_selected('_selected', selected);
this.touch();
}
create_brush() {
// Function to add new brushes.
const index = this.curr_index;
const vertical = this.model.get('orientation') == 'vertical';
const brush: d3.BrushBehavior<any> = (vertical ? d3.brushY() : d3.brushX())
.on('start', () => {
this.brush_start();
})
.on('brush', () => {
this.multi_brush_move(index);
})
.on('end', () => {
this.multi_brush_end(index);
});
brush.extent([
[0, 0],
[this.width, this.height],
]);
const new_brush_g: d3.Selection<SVGGElement, any, any, any> = this.d3el
.append('g')
.attr('class', 'selector brushintsel active');
new_brush_g
.append('text')
.text(this.get_label(this.curr_index))
.attr('class', 'brush_text_' + this.curr_index)
.style('display', 'none');
if (this.model.get('orientation') == 'vertical') {
new_brush_g.select('text').attr('x', 30);
} else {
new_brush_g.select('text').attr('y', 30);
}
new_brush_g.call(brush);
this.color_change();
this.adjust_rectangle();
this.reset_handler(new_brush_g);
this.brushes[this.curr_index] = brush;
this.brush_g[this.curr_index] = new_brush_g;
this.curr_index = this.curr_index + 1;
}
reset_handler(brush_g) {
const that = this;
const old_handler = brush_g.on('mousedown.brush');
brush_g.on('mousedown.brush', function () {
const accelKey = d3GetEvent().ctrlKey || d3GetEvent().metaKey;
if (d3GetEvent().shiftKey && accelKey) {
that.reset();
} else if (accelKey) {
add_remove_classes(d3.select(this), ['inactive'], ['active']);
that.create_brush();
} else if (d3GetEvent().shiftKey && that.selecting_brush === false) {
add_remove_classes(
that.d3el.selectAll('.selector'),
['visible'],
['active', 'inactive']
);
that.selecting_brush = true;
} else {
add_remove_classes(
that.d3el.selectAll('.selector'),
['inactive'],
['visible']
);
add_remove_classes(d3.select(this), ['active'], ['inactive']);
old_handler.call(this);
that.selecting_brush = false;
}
});
}
get_label(index, arr?) {
//arr is optional. If you do not pass anything, this.names is
//considered arr.
if (arr === undefined || arr === null) {
arr = this.names;
}
return arr.length > index ? arr[index] : index;
}
multi_brush_move(item) {
if (this.ignoreBrushEvents) {
return;
}
const extent = d3GetEvent().selection;
this.update_text(item, extent);
this.convert_and_save(extent, item);
}
update_text(item, extent) {
if (extent === null) {
this.d3el.select('.brush_text_' + item).style('display', 'none');
return;
}
const orient = this.model.get('orientation') == 'vertical' ? 'y' : 'x';
const hide_names = !this.model.get('show_names');
const mid = (extent[0] + extent[1]) / 2;
this.d3el
.select('.brush_text_' + item)
.style('display', hide_names ? 'none' : 'inline')
.attr(orient, mid);
}
multi_brush_end(item) {
if (this.ignoreBrushEvents) {
return;
}
const sel = d3GetEvent().selection;
this.model.set('brushing', false);
this.convert_and_save(sel, item);
this.brushing = false;
}
reset() {
this.d3el.selectAll('.selector').remove();
this.model.set('_selected', {});
this.curr_index = 0;
this.brushes = [];
this.brush_g = [];
this.touch();
this.create_brush();
}
convert_and_save(extent, item) {
if (!extent) {
this.update_mark_selected();
this.model.set('_selected', {});
} else {
const selected = utils.deepCopy(this.model.get('_selected'));
selected[this.get_label(item)] = extent.map(
this.scale.invert.bind(this.scale)
);
this.update_mark_selected(extent);
this.model.set('_selected', selected);
}
this.touch();
}
  // TODO: make a proper implementation
  // Intentionally a no-op: syncing brushes from external '_selected'
  // changes is not implemented for this selector yet.
  selected_changed() {}
relayout() {
super.relayout();
this.adjust_rectangle();
this.d3el
.select('.background')
.attr('width', this.width)
.attr('height', this.height);
this.set_range([this.scale]);
this.brushes.forEach((brush) => {
brush.extent([
[0, 0],
[this.width, this.height],
]);
});
this.syncModelToBrush();
}
  private syncModelToBrush() {
    // Move and redraw the brush selectors, preventing move events to be triggered
    // The guard flag keeps multi_brush_move/multi_brush_end from reacting
    // to the programmatic brush.move calls below; restored in finally so
    // an exception cannot leave events permanently suppressed.
    this.ignoreBrushEvents = true;
    try {
      const selected = this.model.get('_selected');
      this.brushes.forEach((brush, index) => {
        const brushSelected = selected[this.get_label(index)];
        if (brushSelected) {
          // Project stored data-space values back to a sorted pixel range.
          const range = brushSelected
            .map((v) => this.scale.scale(v))
            .sort(sort);
          this.update_text(index, range);
          brush.move(this.brush_g[index], range);
        } else {
          this.update_text(index, null);
          brush.move(this.brush_g[index], null);
        }
        // Re-bind the brush and re-install the custom mousedown handler,
        // which the call above may have replaced with d3's default.
        this.brush_g[index].call(brush);
        this.reset_handler(this.brush_g[index]);
      });
    } finally {
      this.ignoreBrushEvents = false;
    }
  }
remove() {
this.model.off('change:names', null, this);
this.model.off('change:color', null, this);
super.remove();
}
  // One d3 brush behavior per selector, indexed by creation order.
  brushes: d3.BrushBehavior<any>[] = [];
  // The <g> element each brush behavior is attached to (parallel array).
  brush_g: d3.Selection<SVGGElement, any, any, any>[] = [];
  // Index the next created brush will occupy.
  curr_index: number;
  // True while shift-driven multi-brush selection mode is active.
  selecting_brush: boolean;
  // Labels consumed by get_label; presumably synced from the model's
  // 'names' trait (see the change:names unbind in remove) — TODO confirm.
  names: string[];
} | the_stack |
import {
Mutable,
Class,
Equals,
Equivalent,
Arrays,
Values,
Domain,
Range,
AnyTiming,
Timing,
Easing,
LinearDomain,
LinearRange,
ContinuousScale,
LinearScale,
} from "@swim/util";
import {Affinity, MemberFastenerClass, Property, Animator} from "@swim/component";
import {DateTime, TimeDomain, TimeScale} from "@swim/time";
import {ScaleGestureInput, ScaleGesture, ViewContextType, ViewFlags, View, ViewSet} from "@swim/view";
import {GraphicsViewInit, GraphicsView} from "@swim/graphics";
import {ScaledXView} from "./ScaledXView";
import {ScaledYView} from "./ScaledYView";
import type {ScaledXYView} from "./ScaledXYView";
import {ContinuousScaleAnimator} from "./ContinuousScaleAnimator";
import type {ScaledViewObserver} from "./ScaledViewObserver";
/** @internal */
export type ScaledFlags = number;
/** @public */
export interface ScaledViewInit<X = unknown, Y = unknown> extends GraphicsViewInit {
  /** Initial continuous scale mapping X-domain values to pixel offsets. */
  xScale?: ContinuousScale<X, number>;
  /** Initial continuous scale mapping Y-domain values to pixel offsets. */
  yScale?: ContinuousScale<Y, number>;
  // For the [min, max] pairs below, `true` means "derive from the data",
  // `false` means "unbounded", and a concrete value pins that endpoint.
  xDomainBounds?: [X | boolean, X | boolean];
  yDomainBounds?: [Y | boolean, Y | boolean];
  xZoomBounds?: [number | boolean, number | boolean];
  yZoomBounds?: [number | boolean, number | boolean];
  xDomainPadding?: [X | boolean, X | boolean];
  yDomainPadding?: [Y | boolean, Y | boolean];
  /** Pixel insets applied to the [min, max] ends of each axis range. */
  xRangePadding?: [number, number];
  yRangePadding?: [number, number];
  /** Alignment factor(s) used when fitting scales to their data. */
  fitAlign?: [number, number] | number;
  xFitAlign?: number;
  yFitAlign?: number;
  fitAspectRatio?: number;
  preserveAspectRatio?: boolean;
  /** Whether scale domains follow their data domains as data changes. */
  domainTracking?: [boolean, boolean] | boolean;
  xDomainTracking?: boolean;
  yDomainTracking?: boolean;
  /** Whether interactive pan/zoom gestures are enabled, per axis. */
  scaleGestures?: [boolean, boolean] | boolean;
  xScaleGestures?: boolean;
  yScaleGestures?: boolean;
  /** Timing for rescale animations; `true` selects the default timing. */
  rescaleTransition?: AnyTiming | boolean;
  /** Timing for rebound animations; `true` selects the default timing. */
  reboundTransition?: AnyTiming | boolean;
}
/** @public */
export abstract class ScaledView<X = unknown, Y = unknown> extends GraphicsView implements ScaledXYView<X, Y> {
  constructor() {
    super();
    this.scaledFlags = 0;
    // Cached per-axis data domains/ranges are computed lazily from child
    // scaled views, so they all start out null.
    this.xDataDomain = null;
    this.yDataDomain = null;
    this.xDataRange = null;
    this.yDataRange = null;
    this.xDataDomainPadded = null;
    this.yDataDomainPadded = null;
  }
  override readonly observerType?: Class<ScaledViewObserver<X, Y>>;
  /** @internal */
  readonly scaledFlags: ScaledFlags;
  /** @internal */
  // Bitfield setter: the field is readonly to outside code, so mutation
  // goes through a Mutable cast here.
  setScaledFlags(scaledFlags: ScaledFlags): void {
    (this as Mutable<this>).scaledFlags = scaledFlags;
  }
  // Animator managing the X-axis scale. `inherits: true` lets nested
  // scaled views share an ancestor's scale when none is set locally.
  @Animator<ScaledView<X, Y>, ContinuousScale<X, number> | null>({
    extends: ContinuousScaleAnimator,
    type: ContinuousScale,
    inherits: true,
    value: null,
    updateFlags: View.NeedsLayout,
    willSetValue(newXScale: ContinuousScale<X, number> | null, oldXScale: ContinuousScale<X, number> | null): void {
      this.owner.callObservers("viewWillSetXScale", newXScale, oldXScale, this.owner);
    },
    didSetValue(newXScale: ContinuousScale<X, number> | null, oldXScale: ContinuousScale<X, number> | null): void {
      // The pixel projection of the data domain depends on the scale.
      this.owner.updateXDataRange();
      this.owner.callObservers("viewDidSetXScale", newXScale, oldXScale, this.owner);
    },
    // The three transition hooks below forward to the owner's bounding
    // callbacks only while the XBoundingFlag is set (i.e. while the
    // scale is animating toward its clamped domain).
    willTransition(xScale: ContinuousScale<X, number>): void {
      if ((this.owner.scaledFlags & ScaledView.XBoundingFlag) !== 0) {
        this.owner.onBeginBoundingXScale(xScale);
      }
    },
    didTransition(xScale: ContinuousScale<X, number>): void {
      if ((this.owner.scaledFlags & ScaledView.XBoundingFlag) !== 0) {
        this.owner.onEndBoundingXScale(xScale);
      }
    },
    didInterrupt(xScale: ContinuousScale<X, number>): void {
      if ((this.owner.scaledFlags & ScaledView.XBoundingFlag) !== 0) {
        this.owner.onInterruptBoundingXScale(xScale);
      }
    },
  })
  readonly xScale!: ContinuousScaleAnimator<this, X, number>;
  // Animator managing the Y-axis scale; mirrors xScale above.
  @Animator<ScaledView<X, Y>, ContinuousScale<Y, number> | null>({
    extends: ContinuousScaleAnimator,
    type: ContinuousScale,
    inherits: true,
    value: null,
    updateFlags: View.NeedsLayout,
    willSetValue(newYScale: ContinuousScale<Y, number> | null, oldYScale: ContinuousScale<Y, number> | null): void {
      this.owner.callObservers("viewWillSetYScale", newYScale, oldYScale, this.owner);
    },
    didSetValue(newYScale: ContinuousScale<Y, number> | null, oldYScale: ContinuousScale<Y, number> | null): void {
      // The pixel projection of the data domain depends on the scale.
      this.owner.updateYDataRange();
      this.owner.callObservers("viewDidSetYScale", newYScale, oldYScale, this.owner);
    },
    // Bounding notifications bracket clamped-domain transitions; they
    // fire only while YBoundingFlag is set.
    willTransition(yScale: ContinuousScale<Y, number>): void {
      if ((this.owner.scaledFlags & ScaledView.YBoundingFlag) !== 0) {
        this.owner.onBeginBoundingYScale(yScale);
      }
    },
    didTransition(yScale: ContinuousScale<Y, number>): void {
      if ((this.owner.scaledFlags & ScaledView.YBoundingFlag) !== 0) {
        this.owner.onEndBoundingYScale(yScale);
      }
    },
    didInterrupt(yScale: ContinuousScale<Y, number>): void {
      if ((this.owner.scaledFlags & ScaledView.YBoundingFlag) !== 0) {
        this.owner.onInterruptBoundingYScale(yScale);
      }
    },
  })
  readonly yScale!: ContinuousScaleAnimator<this, Y, number>;
  xDomain(): Domain<X> | null;
  xDomain(xDomain: Domain<X> | string | null, timing?: AnyTiming | boolean): this;
  xDomain(xMin: X, xMax: X, timing?: AnyTiming | boolean): this;
  /**
   * Overloaded getter/setter for the X scale's domain. With no arguments
   * returns the current domain (null when no scale is set). With a
   * Domain or scale-descriptor string, replaces the whole domain; with
   * two values, sets [xMin, xMax]. `timing === true` substitutes the
   * rescaleTransition property's timing.
   */
  xDomain(xMin?: Domain<X> | X | string | null, xMax?: X | AnyTiming | boolean,
          timing?: AnyTiming | boolean): Domain<X> | null | this {
    if (xMin === void 0) {
      const xScale = this.xScale.value;
      return xScale !== null ? xScale.domain : null;
    } else {
      if (xMin instanceof Domain || typeof xMin === "string") {
        // Single-domain form: the second positional arg is the timing.
        timing = xMax as AnyTiming | boolean;
      }
      if (timing === true) {
        timing = this.rescaleTransition.value;
      }
      const xRange = this.xRange();
      if (xMin instanceof Domain || typeof xMin === "string") {
        if (xRange !== null) {
          this.xScale.setBaseScale(xMin as Domain<X> | string, xRange, timing);
        } else {
          // No layout yet: set the domain alone; range comes later.
          this.xScale.setBaseDomain(xMin as Domain<X> | string, timing);
        }
      } else {
        if (xRange !== null) {
          this.xScale.setBaseScale(xMin as X, xMax as X, xRange[0], xRange[1], timing);
        } else {
          this.xScale.setBaseDomain(xMin as X, xMax as X, timing);
        }
      }
      return this;
    }
  }
  yDomain(): Domain<Y> | null;
  yDomain(yDomain: Domain<Y> | string | null, timing?: AnyTiming | boolean): this;
  yDomain(yMin: Y, yMax: Y, timing?: AnyTiming | boolean): this;
  /**
   * Overloaded getter/setter for the Y scale's domain; mirrors xDomain.
   * `timing === true` substitutes the rescaleTransition property's timing.
   */
  yDomain(yMin?: Domain<Y> | Y | string | null, yMax?: Y | AnyTiming | boolean,
          timing?: AnyTiming | boolean): Domain<Y> | null | this {
    if (yMin === void 0) {
      const yScale = this.yScale.value;
      return yScale !== null ? yScale.domain : null;
    } else {
      if (yMin instanceof Domain || typeof yMin === "string") {
        // Single-domain form: the second positional arg is the timing.
        timing = yMax as AnyTiming | boolean;
      }
      if (timing === true) {
        timing = this.rescaleTransition.value;
      }
      const yRange = this.yRange();
      if (yMin instanceof Domain || typeof yMin === "string") {
        if (yRange !== null) {
          this.yScale.setBaseScale(yMin as Domain<Y> | string, yRange, timing);
        } else {
          // No layout yet: set the domain alone; range comes later.
          this.yScale.setBaseDomain(yMin as Domain<Y>| string, timing);
        }
      } else {
        if (yRange !== null) {
          this.yScale.setBaseScale(yMin as Y, yMax as Y, yRange[0], yRange[1], timing);
        } else {
          this.yScale.setBaseDomain(yMin as Y, yMax as Y, timing);
        }
      }
      return this;
    }
  }
xRange(): Range<number> | null {
const width = this.viewFrame.width;
if (isFinite(width)) {
const xRangePadding = this.xRangePadding.value;
const xRangeMin = xRangePadding[0];
const xRangeMax = width - xRangePadding[1];
return LinearRange(xRangeMin, xRangeMax);
} else {
return null;
}
}
yRange(): Range<number> | null {
const height = this.viewFrame.height;
if (isFinite(height)) {
const yRangePadding = this.yRangePadding.value;
const yRangeMin = yRangePadding[0];
const yRangeMax = height - yRangePadding[1];
return LinearRange(yRangeMax, yRangeMin);
} else {
return null;
}
}
  // Union of the X data domains of all child scaled views; null until known.
  readonly xDataDomain: Domain<X> | null;
  // will/on/did lifecycle around xDataDomain changes; no-op when equal.
  protected setXDataDomain(newXDataDomain: Domain<X> | null): void {
    const oldXDataDomain = this.xDataDomain;
    if (!Equals(newXDataDomain, oldXDataDomain)) {
      this.willSetXDataDomain(newXDataDomain, oldXDataDomain);
      (this as Mutable<this>).xDataDomain = newXDataDomain;
      this.onSetXDataDomain(newXDataDomain, oldXDataDomain);
      this.didSetXDataDomain(newXDataDomain, oldXDataDomain);
    }
  }
  protected willSetXDataDomain(newXDataDomain: Domain<X> | null, oldXDataDomain: Domain<X> | null): void {
    this.callObservers("viewWillSetXDataDomain", newXDataDomain, oldXDataDomain, this);
  }
  protected onSetXDataDomain(newXDataDomain: Domain<X> | null, oldXDataDomain: Domain<X> | null): void {
    // Derived pixel range and padded domain both depend on the domain.
    this.updateXDataRange();
    this.updateXDataDomainPadded();
    this.requireUpdate(View.NeedsLayout);
  }
  protected didSetXDataDomain(newXDataDomain: Domain<X> | null, oldXDataDomain: Domain<X> | null): void {
    this.callObservers("viewDidSetXDataDomain", newXDataDomain, oldXDataDomain, this);
  }
  // Folds one child's domain into the cached union. With a single child
  // the child's domain is adopted outright; otherwise the union only
  // widens — it never shrinks when a child's domain contracts.
  protected updateXDataDomain(xScaledDomain: Domain<X> | null): void {
    let xDataDomain = this.xDataDomain;
    if (xDataDomain === null || this.scaled.viewCount === 1) {
      xDataDomain = xScaledDomain;
    } else if (xScaledDomain !== null) {
      if (Values.compare(xScaledDomain[0], xDataDomain[0]) < 0) {
        xDataDomain = Domain(xScaledDomain[0], xDataDomain[1]);
      }
      if (Values.compare(xDataDomain[1], xScaledDomain[1]) < 0) {
        xDataDomain = Domain(xDataDomain[0], xScaledDomain[1]);
      }
    }
    this.setXDataDomain(xDataDomain);
  }
  // Union of the Y data domains of all child scaled views; null until known.
  readonly yDataDomain: Domain<Y> | null;
  // will/on/did lifecycle around yDataDomain changes; no-op when equal.
  protected setYDataDomain(newYDataDomain: Domain<Y> | null): void {
    const oldYDataDomain = this.yDataDomain;
    if (!Equals(newYDataDomain, oldYDataDomain)) {
      this.willSetYDataDomain(newYDataDomain, oldYDataDomain);
      (this as Mutable<this>).yDataDomain = newYDataDomain;
      this.onSetYDataDomain(newYDataDomain, oldYDataDomain);
      this.didSetYDataDomain(newYDataDomain, oldYDataDomain);
    }
  }
  protected willSetYDataDomain(newYDataDomain: Domain<Y> | null, oldYDataDomain: Domain<Y> | null): void {
    this.callObservers("viewWillSetYDataDomain", newYDataDomain, oldYDataDomain, this);
  }
  protected onSetYDataDomain(newYDataDomain: Domain<Y> | null, oldYDataDomain: Domain<Y> | null): void {
    // Derived pixel range and padded domain both depend on the domain.
    this.updateYDataRange();
    this.updateYDataDomainPadded();
    this.requireUpdate(View.NeedsLayout);
  }
  protected didSetYDataDomain(newYDataDomain: Domain<Y> | null, oldYDataDomain: Domain<Y> | null): void {
    this.callObservers("viewDidSetYDataDomain", newYDataDomain, oldYDataDomain, this);
  }
  // Folds one child's domain into the cached union; mirrors
  // updateXDataDomain (widen-only except in the single-child case).
  protected updateYDataDomain(yScaledDomain: Domain<Y> | null): void {
    let yDataDomain = this.yDataDomain;
    if (yDataDomain === null || this.scaled.viewCount === 1) {
      yDataDomain = yScaledDomain;
    } else if (yScaledDomain !== null) {
      if (Values.compare(yScaledDomain[0], yDataDomain[0]) < 0) {
        yDataDomain = Domain(yScaledDomain[0], yDataDomain[1]);
      }
      if (Values.compare(yDataDomain[1], yScaledDomain[1]) < 0) {
        yDataDomain = Domain(yDataDomain[0], yScaledDomain[1]);
      }
    }
    this.setYDataDomain(yDataDomain);
  }
  // Pixel-space projection of xDataDomain through the current X scale.
  readonly xDataRange: Range<number> | null;
  protected setXDataRange(xDataRange: Range<number> | null): void {
    (this as Mutable<this>).xDataRange = xDataRange;
  }
  protected updateXDataRange(): void {
    const xDataDomain = this.xDataDomain;
    // NOTE(review): when xDataDomain is null the previously cached range
    // is retained rather than cleared — confirm this is intentional.
    if (xDataDomain !== null) {
      const xScale = this.xScale.value;
      if (xScale !== null) {
        this.setXDataRange(LinearRange(xScale(xDataDomain[0]), xScale(xDataDomain[1])));
      } else {
        this.setXDataRange(null);
      }
    }
  }
  // Pixel-space projection of yDataDomain through the current Y scale.
  readonly yDataRange: Range<number> | null;
  protected setYDataRange(yDataRange: Range<number> | null): void {
    (this as Mutable<this>).yDataRange = yDataRange;
  }
  protected updateYDataRange(): void {
    const yDataDomain = this.yDataDomain;
    // Same retention behavior as updateXDataRange when the domain is null.
    if (yDataDomain !== null) {
      const yScale = this.yScale.value;
      if (yScale !== null) {
        this.setYDataRange(LinearRange(yScale(yDataDomain[0]), yScale(yDataDomain[1])));
      } else {
        this.setYDataRange(null);
      }
    }
  }
  // xDataDomain widened by xDomainPadding; used as the fit/clamp target.
  readonly xDataDomainPadded: Domain<X> | null;
  protected setXDataDomainPadded(xDataDomainPadded: Domain<X> | null): void {
    (this as Mutable<this>).xDataDomainPadded = xDataDomainPadded;
  }
  protected updateXDataDomainPadded(): void {
    let xDataDomainPadded: Domain<X> | null;
    const xDataDomain = this.xDataDomain;
    if (xDataDomain !== null) {
      let xDataDomainPaddedMin = xDataDomain[0];
      let xDataDomainPaddedMax = xDataDomain[1];
      const xDomainPadding = this.xDomainPadding.value;
      // Boolean padding entries mean "no numeric padding"; otherwise the
      // unary + coercion assumes X is numeric or date-like — TODO confirm
      // for non-numeric domain types.
      if (typeof xDomainPadding[0] !== "boolean") {
        xDataDomainPaddedMin = (+xDataDomainPaddedMin - +xDomainPadding[0]) as unknown as X;
      }
      if (typeof xDomainPadding[1] !== "boolean") {
        xDataDomainPaddedMax = (+xDataDomainPaddedMax + +xDomainPadding[1]) as unknown as X;
      }
      xDataDomainPadded = Domain(xDataDomainPaddedMin, xDataDomainPaddedMax);
    } else {
      xDataDomainPadded = null;
    }
    this.setXDataDomainPadded(xDataDomainPadded);
  }
  // yDataDomain widened by yDomainPadding; mirrors the X-axis logic.
  readonly yDataDomainPadded: Domain<Y> | null;
  protected setYDataDomainPadded(yDataDomainPadded: Domain<Y> | null): void {
    (this as Mutable<this>).yDataDomainPadded = yDataDomainPadded;
  }
  protected updateYDataDomainPadded(): void {
    let yDataDomainPadded: Domain<Y> | null;
    const yDataDomain = this.yDataDomain;
    if (yDataDomain !== null) {
      let yDataDomainPaddedMin = yDataDomain[0];
      let yDataDomainPaddedMax = yDataDomain[1];
      const yDomainPadding = this.yDomainPadding.value;
      // Same numeric-coercion assumption as the X-axis variant.
      if (typeof yDomainPadding[0] !== "boolean") {
        yDataDomainPaddedMin = (+yDataDomainPaddedMin - +yDomainPadding[0]) as unknown as Y;
      }
      if (typeof yDomainPadding[1] !== "boolean") {
        yDataDomainPaddedMax = (+yDataDomainPaddedMax + +yDomainPadding[1]) as unknown as Y;
      }
      yDataDomainPadded = Domain(yDataDomainPaddedMin, yDataDomainPaddedMax);
    } else {
      yDataDomainPadded = null;
    }
    this.setYDataDomainPadded(yDataDomainPadded);
  }
  // Domain clamp bounds for the X axis: `true` tracks the (padded) data
  // domain, `false` leaves that end unbounded, a value pins the endpoint.
  @Property<ScaledView<X, Y>, readonly [X | boolean, X | boolean]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [X | boolean, X | boolean] {
      return [true, true];
    },
    equalValues(newXDomainBounds: readonly [X | boolean, X | boolean], oldXDomainBounds: readonly [X | boolean, X | boolean]): boolean {
      return Arrays.equal(newXDomainBounds, oldXDomainBounds);
    },
  })
  readonly xDomainBounds!: Property<this, readonly [X | boolean, X | boolean]>
  // Domain clamp bounds for the Y axis (same semantics as xDomainBounds).
  @Property<ScaledView<X, Y>, readonly [Y | boolean, Y | boolean]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [Y | boolean, Y | boolean] {
      return [true, true];
    },
    equalValues(newYDomainBounds: readonly [Y | boolean, Y | boolean], oldYDomainBounds: readonly [Y | boolean, Y | boolean]): boolean {
      return Arrays.equal(newYDomainBounds, oldYDomainBounds);
    },
  })
  readonly yDomainBounds!: Property<this, readonly [Y | boolean, Y | boolean]>
  // Zoom (domain span) bounds for the X axis: `true` selects built-in
  // per-scale-type defaults, `false` leaves that end unbounded.
  @Property<ScaledView<X, Y>, readonly [number | boolean, number | boolean]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [number | boolean, number | boolean] {
      return [true, true];
    },
    equalValues(newXZoomBounds: readonly [number | boolean, number | boolean], oldXZoomBounds: readonly [number | boolean, number | boolean]): boolean {
      return Arrays.equal(newXZoomBounds, oldXZoomBounds);
    },
  })
  readonly xZoomBounds!: Property<this, readonly [number | boolean, number | boolean]>
@Property<ScaledView<X, Y>, readonly [number | boolean, number | boolean]>({
updateFlags: View.NeedsLayout,
initValue(): readonly [number | boolean, number | boolean] {
return [true, true];
},
equalValues(newYDomainBounds: readonly [number | boolean, number | boolean], oldYDomainBounds: readonly [number | boolean, number | boolean]): boolean {
return Arrays.equal(newYDomainBounds, oldYDomainBounds);
},
})
readonly yZoomBounds!: Property<this, readonly [number | boolean, number | boolean]>
  // Extra padding applied around the X data domain before fitting;
  // boolean entries mean no numeric padding (defaults to none).
  @Property<ScaledView<X, Y>, readonly [X | boolean, X | boolean]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [X | boolean, X | boolean] {
      return [false, false];
    },
    equalValues(newXDomainPadding: readonly [X | boolean, X | boolean], oldXDomainPadding: readonly [X | boolean, X | boolean]): boolean {
      return Arrays.equal(newXDomainPadding, oldXDomainPadding);
    },
  })
  readonly xDomainPadding!: Property<this, readonly [X | boolean, X | boolean]>
  // Extra padding applied around the Y data domain before fitting.
  @Property<ScaledView<X, Y>, readonly [Y | boolean, Y | boolean]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [Y | boolean, Y | boolean] {
      return [false, false];
    },
    equalValues(newYDomainPadding: readonly [Y | boolean, Y | boolean], oldYDomainPadding: readonly [Y | boolean, Y | boolean]): boolean {
      return Arrays.equal(newYDomainPadding, oldYDomainPadding);
    },
  })
  readonly yDomainPadding!: Property<this, readonly [Y | boolean, Y | boolean]>
  // Widens this view's intrinsic X range padding to cover a child's
  // padding; with a single child the child's padding is adopted as-is.
  protected updateXRangePadding(xScaledRangePadding: readonly [number, number]): void {
    if (this.xRangePadding.hasAffinity(Affinity.Intrinsic)) {
      let xRangePadding = this.xRangePadding.value;
      // NOTE(review): the two null comparisons below look unreachable
      // given the non-null tuple types and initValue — presumably
      // defensive; confirm before simplifying.
      if (xRangePadding === null || this.scaled.viewCount === 1) {
        xRangePadding = xScaledRangePadding;
      } else if (xScaledRangePadding !== null) {
        xRangePadding = [Math.max(xRangePadding[0], xScaledRangePadding[0]), Math.max(xRangePadding[1], xScaledRangePadding[1])];
      }
      this.xRangePadding.setValue(xRangePadding, Affinity.Intrinsic);
    }
  }
  // Pixel insets applied to the [min, max] ends of the X range; observers
  // are notified around value changes.
  @Property<ScaledView<X, Y>, readonly [number, number]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [number, number] {
      return [0, 0];
    },
    willSetValue(newXRangePadding: readonly [number, number], oldXRangePadding: readonly [number, number]): void {
      this.owner.callObservers("viewWillSetXRangePadding", newXRangePadding, oldXRangePadding, this.owner);
    },
    didSetValue(newXRangePadding: readonly [number, number], oldXRangePadding: readonly [number, number]): void {
      this.owner.callObservers("viewDidSetXRangePadding", newXRangePadding, oldXRangePadding, this.owner);
    },
    equalValues(newXRangePadding: readonly [number, number], oldXRangePadding: readonly [number, number]): boolean {
      return Arrays.equal(newXRangePadding, oldXRangePadding);
    },
  })
  readonly xRangePadding!: Property<this, readonly [number, number]>
  // Widens this view's intrinsic Y range padding to cover a child's
  // padding; mirrors updateXRangePadding (including its defensive,
  // likely-unreachable null checks).
  protected updateYRangePadding(yScaledRangePadding: readonly [number, number]): void {
    if (this.yRangePadding.hasAffinity(Affinity.Intrinsic)) {
      let yRangePadding = this.yRangePadding.value;
      if (yRangePadding === null || this.scaled.viewCount === 1) {
        yRangePadding = yScaledRangePadding;
      } else if (yScaledRangePadding !== null) {
        yRangePadding = [Math.max(yRangePadding[0], yScaledRangePadding[0]), Math.max(yRangePadding[1], yScaledRangePadding[1])];
      }
      this.yRangePadding.setValue(yRangePadding, Affinity.Intrinsic);
    }
  }
  // Pixel insets applied to the [min, max] ends of the Y range; observers
  // are notified around value changes.
  @Property<ScaledView<X, Y>, readonly [number, number]>({
    updateFlags: View.NeedsLayout,
    initValue(): readonly [number, number] {
      return [0, 0];
    },
    willSetValue(newYRangePadding: readonly [number, number], oldYRangePadding: readonly [number, number]): void {
      this.owner.callObservers("viewWillSetYRangePadding", newYRangePadding, oldYRangePadding, this.owner);
    },
    didSetValue(newYRangePadding: readonly [number, number], oldYRangePadding: readonly [number, number]): void {
      this.owner.callObservers("viewDidSetYRangePadding", newYRangePadding, oldYRangePadding, this.owner);
    },
    equalValues(newYRangePadding: readonly [number, number], oldYRangePadding: readonly [number, number]): boolean {
      return Arrays.equal(newYRangePadding, oldYRangePadding);
    },
  })
  readonly yRangePadding!: Property<this, readonly [number, number]>
  // [x, y] alignment factors used when fitting scales to data; a bare
  // number is expanded to both axes by fromAny. Defaults to [1.0, 0.5].
  @Property<ScaledView<X, Y>, readonly [number, number], number>({
    type: Object,
    initValue(): readonly [number, number] {
      return [1.0, 0.5];
    },
    equalValues(newFitAlign: readonly [number, number], oldFitAlign: readonly [number, number]): boolean {
      return Arrays.equal(newFitAlign, oldFitAlign);
    },
    fromAny(value: readonly [number, number] | number): readonly [number, number] {
      if (typeof value === "number") {
        return [value, value];
      } else {
        return value;
      }
    },
  })
  readonly fitAlign!: Property<this, readonly [number, number], number>;
xFitAlign(): number;
xFitAlign(xFitAlign: number): this;
xFitAlign(xFitAlign?: number): number | this {
const fitAlign = this.fitAlign.value;
if (xFitAlign === void 0) {
return fitAlign[0];
} else {
this.fitAlign.setValue([xFitAlign, fitAlign[1]]);
return this;
}
}
yFitAlign(): number;
yFitAlign(yFitAlign: number): this;
yFitAlign(yFitAlign?: number): number | this {
const fitAlign = this.fitAlign.value;
if (yFitAlign === void 0) {
return fitAlign[0];
} else {
this.fitAlign.setValue([fitAlign[0], yFitAlign]);
return this;
}
}
  // Target width:height aspect ratio used when fitting; undefined means
  // no ratio constraint.
  @Property({type: Number})
  readonly fitAspectRatio!: Property<this, number | undefined>;
  preserveAspectRatio(): boolean;
  preserveAspectRatio(preserveAspectRatio: boolean): this;
  // Getter/setter delegating aspect-ratio preservation to the scale gesture.
  preserveAspectRatio(preserveAspectRatio?: boolean): boolean | this {
    if (preserveAspectRatio === void 0) {
      return this.gesture.preserveAspectRatio;
    } else {
      this.gesture.preserveAspectRatio = preserveAspectRatio;
      return this;
    }
  }
  domainTracking(): readonly [boolean, boolean];
  domainTracking(domainTracking: readonly [boolean, boolean] | boolean): this;
  domainTracking(xDomainTracking: boolean, yDomainTracking: boolean): this;
  /**
   * Getter/setter for per-axis domain tracking, stored as bits in
   * scaledFlags. Accepts a [x, y] pair, a single boolean applied to both
   * axes, or two positional booleans.
   */
  domainTracking(xDomainTracking?: readonly [boolean, boolean] | boolean,
                 yDomainTracking?: boolean): readonly [boolean, boolean] | this {
    if (xDomainTracking === void 0) {
      return [(this.scaledFlags & ScaledView.XDomainTrackingFlag) !== 0,
              (this.scaledFlags & ScaledView.YDomainTrackingFlag) !== 0];
    } else {
      if (Array.isArray(xDomainTracking)) {
        yDomainTracking = xDomainTracking[1] as boolean;
        xDomainTracking = xDomainTracking[0] as boolean;
      } else if (yDomainTracking === void 0) {
        // Single boolean: apply to both axes.
        yDomainTracking = xDomainTracking as boolean;
      }
      if (xDomainTracking as boolean) {
        this.setScaledFlags(this.scaledFlags | ScaledView.XDomainTrackingFlag);
      } else {
        this.setScaledFlags(this.scaledFlags & ~ScaledView.XDomainTrackingFlag);
      }
      if (yDomainTracking) {
        this.setScaledFlags(this.scaledFlags | ScaledView.YDomainTrackingFlag);
      } else {
        this.setScaledFlags(this.scaledFlags & ~ScaledView.YDomainTrackingFlag);
      }
      return this;
    }
  }
xDomainTracking(): boolean;
xDomainTracking(xDomainTracking: boolean): this;
xDomainTracking(xDomainTracking?: boolean): boolean | this {
if (xDomainTracking === void 0) {
return (this.scaledFlags & ScaledView.XDomainTrackingFlag) !== 0;
} else {
if (xDomainTracking) {
this.setScaledFlags(this.scaledFlags | ScaledView.XDomainTrackingFlag);
} else {
this.setScaledFlags(this.scaledFlags & ~ScaledView.XDomainTrackingFlag);
}
return this;
}
}
yDomainTracking(): boolean;
yDomainTracking(yDomainTracking: boolean): this;
yDomainTracking(yDomainTracking?: boolean): boolean | this {
if (yDomainTracking === void 0) {
return (this.scaledFlags & ScaledView.YDomainTrackingFlag) !== 0;
} else {
if (yDomainTracking) {
this.setScaledFlags(this.scaledFlags | ScaledView.YDomainTrackingFlag);
} else {
this.setScaledFlags(this.scaledFlags & ~ScaledView.YDomainTrackingFlag);
}
return this;
}
}
  scaleGestures(): readonly [boolean, boolean];
  scaleGestures(scaleGestures: readonly [boolean, boolean] | boolean): this;
  scaleGestures(xScaleGestures: boolean, yScaleGestures: boolean): this;
  /**
   * Getter/setter for per-axis pan/zoom gesture enablement, stored as
   * bits in scaledFlags. Accepts a [x, y] pair, a single boolean applied
   * to both axes, or two positional booleans.
   */
  scaleGestures(xScaleGestures?: readonly [boolean, boolean] | boolean,
                yScaleGestures?: boolean): readonly [boolean, boolean] | this {
    if (xScaleGestures === void 0) {
      return [(this.scaledFlags & ScaledView.XScaleGesturesFlag) !== 0,
              (this.scaledFlags & ScaledView.YScaleGesturesFlag) !== 0];
    } else {
      if (Array.isArray(xScaleGestures)) {
        yScaleGestures = xScaleGestures[1] as boolean;
        xScaleGestures = xScaleGestures[0] as boolean;
      } else if (yScaleGestures === void 0) {
        // Single boolean: apply to both axes.
        yScaleGestures = xScaleGestures as boolean;
      }
      if (xScaleGestures as boolean) {
        this.setScaledFlags(this.scaledFlags | ScaledView.XScaleGesturesFlag);
      } else {
        this.setScaledFlags(this.scaledFlags & ~ScaledView.XScaleGesturesFlag);
      }
      if (yScaleGestures) {
        this.setScaledFlags(this.scaledFlags | ScaledView.YScaleGesturesFlag);
      } else {
        this.setScaledFlags(this.scaledFlags & ~ScaledView.YScaleGesturesFlag);
      }
      return this;
    }
  }
xScaleGestures(): boolean;
xScaleGestures(xScaleGestures: boolean): this;
xScaleGestures(xScaleGestures?: boolean): boolean | this {
if (xScaleGestures === void 0) {
return (this.scaledFlags & ScaledView.XScaleGesturesFlag) !== 0;
} else {
if (xScaleGestures) {
this.setScaledFlags(this.scaledFlags | ScaledView.XScaleGesturesFlag);
} else {
this.setScaledFlags(this.scaledFlags & ~ScaledView.XScaleGesturesFlag);
}
return this;
}
}
yScaleGestures(): boolean;
yScaleGestures(yScaleGestures: boolean): this;
yScaleGestures(yScaleGestures?: boolean): boolean | this {
if (yScaleGestures === void 0) {
return (this.scaledFlags & ScaledView.YScaleGesturesFlag) !== 0;
} else {
if (yScaleGestures) {
this.setScaledFlags(this.scaledFlags | ScaledView.YScaleGesturesFlag);
} else {
this.setScaledFlags(this.scaledFlags & ~ScaledView.YScaleGesturesFlag);
}
return this;
}
}
  // Timing used when an explicit domain change requests animation
  // (timing === true in xDomain/yDomain); inherited from ancestors.
  @Property<ScaledView<X, Y>, Timing | boolean | undefined, AnyTiming>({
    type: Timing,
    inherits: true,
    initValue(): Timing | boolean | undefined {
      return Easing.linear.withDuration(250);
    },
  })
  readonly rescaleTransition!: Property<this, Timing | boolean | undefined, AnyTiming>;
  // Timing used when a scale springs back after leaving its bounds;
  // inherited from ancestors.
  @Property<ScaledView<X, Y>, Timing | boolean | undefined, AnyTiming>({
    type: Timing,
    inherits: true,
    initValue(): Timing | boolean | undefined {
      return Easing.cubicOut.withDuration(250);
    },
  })
  readonly reboundTransition!: Property<this, Timing | boolean | undefined, AnyTiming>;
fitX(tween: boolean = false): void {
this.setScaledFlags(this.scaledFlags | ScaledView.XFitFlag);
if (tween === true) {
this.setScaledFlags(this.scaledFlags | ScaledView.XFitTweenFlag);
}
this.requireUpdate(View.NeedsLayout);
}
fitY(tween: boolean = false): void {
this.setScaledFlags(this.scaledFlags | ScaledView.YFitFlag);
if (tween === true) {
this.setScaledFlags(this.scaledFlags | ScaledView.YFitTweenFlag);
}
this.requireUpdate(View.NeedsLayout);
}
fit(tween: boolean = false): void {
this.setScaledFlags(this.scaledFlags | (ScaledView.XFitFlag | ScaledView.YFitFlag));
if (tween === true) {
this.setScaledFlags(this.scaledFlags | ScaledView.FitTweenMask);
}
this.requireUpdate(View.NeedsLayout);
}
  /** @internal */
  // Set of child scaled views (X- and/or Y-scaled). Auto-binds matching
  // children and observes them to keep the aggregate data domains and
  // range paddings in sync.
  @ViewSet<ScaledView<X, Y>, ScaledXView<X> | ScaledYView<Y>>({
    binds: true,
    observes: true,
    willAttachView(scaledView: ScaledXView<X> | ScaledYView<Y>, targetView: View | null): void {
      this.owner.callObservers("viewWillAttachScaled", scaledView, targetView, this.owner);
    },
    didAttachView(newScaledView: ScaledXView<X> | ScaledYView<Y>): void {
      // Fold the newcomer's data domains into the aggregate immediately.
      if (ScaledXView.is<X>(newScaledView)) {
        this.owner.updateXDataDomain(newScaledView.xDataDomain);
      }
      if (ScaledYView.is<Y>(newScaledView)) {
        this.owner.updateYDataDomain(newScaledView.yDataDomain);
      }
    },
    didDetachView(scaledView: ScaledXView<X> | ScaledYView<Y>): void {
      this.owner.callObservers("viewDidDetachScaled", scaledView, this.owner);
    },
    viewDidSetXRangePadding(newXRangePadding: readonly [number, number], oldXRangePadding: readonly [number, number]): void {
      this.owner.updateXRangePadding(newXRangePadding);
    },
    viewDidSetYRangePadding(newYRangePadding: readonly [number, number], oldYRangePadding: readonly [number, number]): void {
      this.owner.updateYRangePadding(newYRangePadding);
    },
    viewDidSetXDataDomain(newXDataDomain: Domain<X> | null, oldXDataDomain: Domain<X> | null): void {
      this.owner.updateXDataDomain(newXDataDomain);
      this.owner.requireUpdate(View.NeedsLayout);
    },
    viewDidSetYDataDomain(newYDataDomain: Domain<Y> | null, oldYDataDomain: Domain<Y> | null): void {
      this.owner.updateYDataDomain(newYDataDomain);
      this.owner.requireUpdate(View.NeedsLayout);
    },
    detectView(view: View): ScaledXView<X> | ScaledYView<Y> | null {
      return ScaledXView.is<X>(view) || ScaledYView.is<Y>(view) ? view : null;
    },
  })
  readonly scaled!: ViewSet<this, ScaledXView<X> | ScaledYView<Y>>;
  static readonly scaled: MemberFastenerClass<ScaledView, "scaled">;
  protected override onLayout(viewContext: ViewContextType<this>): void {
    super.onLayout(viewContext);
    // Flush any pending scale animation state before measuring.
    this.xScale.recohere(viewContext.updateTime);
    this.yScale.recohere(viewContext.updateTime);
    // First match scale ranges to the frame, then clamp/fit the domains.
    this.resizeScales();
    this.updateScales();
  }
  /**
   * Updates own scale ranges to project onto view frame. Infers own scales
   * from child view data domains if inherited x/y scales are undefined.
   */
  protected resizeScales(): void {
    let xScale: ContinuousScale<X, number> | null;
    const xRange = this.xRange();
    if (xRange !== null) {
      // Only a locally-owned (non-inherited) scale is resized here.
      xScale = !this.xScale.inherited ? this.xScale.state : null;
      if (xScale !== null) {
        if (!xScale.range.equals(xRange)) {
          this.xScale.setRange(xRange);
          this.setScaledFlags(this.scaledFlags | ScaledView.RescaleFlag);
        }
      } else if (this.xScale.superFastener === null || this.xScale.superValue === null) {
        // No scale anywhere in the hierarchy: synthesize one from the
        // padded data domain and schedule an initial fit.
        const xDataDomainPadded = this.xDataDomainPadded;
        if (xDataDomainPadded !== null) {
          xScale = ScaledView.createScale(xDataDomainPadded[0], xDataDomainPadded[1], xRange[0], xRange[1]);
          this.xScale.setState(xScale);
          this.setScaledFlags(this.scaledFlags | ScaledView.XFitFlag);
        }
      }
    }
    let yScale: ContinuousScale<Y, number> | null;
    const yRange = this.yRange();
    if (yRange !== null) {
      // Mirrors the X-axis logic above.
      yScale = !this.yScale.inherited ? this.yScale.state : null;
      if (yScale !== null) {
        if (!yScale.range.equals(yRange)) {
          this.yScale.setRange(yRange);
          this.setScaledFlags(this.scaledFlags | ScaledView.RescaleFlag);
        }
      } else if (this.yScale.superFastener === null || this.yScale.superValue === null) {
        const yDataDomainPadded = this.yDataDomainPadded;
        if (yDataDomainPadded !== null) {
          yScale = ScaledView.createScale(yDataDomainPadded[0], yDataDomainPadded[1], yRange[0], yRange[1]);
          this.yScale.setState(yScale);
          this.setScaledFlags(this.scaledFlags | ScaledView.YFitFlag);
        }
      }
    }
  }
protected updateScales(): void {
const xScale = !this.xScale.inherited ? this.xScale.state : null;
const yScale = !this.yScale.inherited ? this.yScale.state : null;
if (xScale !== null && yScale !== null) {
const isPressing = this.gesture.pressing;
if (!isPressing) {
const isCoasting = this.gesture.coasting;
this.boundScales(xScale, yScale, isCoasting);
}
}
}
/**
* Clamps scales to domain bounds and corrects aspect ratio.
*/
protected boundScales(oldXScale: ContinuousScale<X, number>,
oldYScale: ContinuousScale<Y, number>,
isCoasting: boolean): void {
const oldXDomain = oldXScale.domain;
const oldYDomain = oldYScale.domain;
let newXDomain: Domain<X> = oldXDomain;
let newYDomain: Domain<Y> = oldYDomain;
// fit x domain
const xDataDomainPadded = this.xDataDomainPadded;
if (xDataDomainPadded !== null && (this.scaledFlags & ScaledView.XFitFlag) !== 0) {
newXDomain = xDataDomainPadded;
this.setScaledFlags(this.scaledFlags | ScaledView.XInRangeMask);
} else {
newXDomain = oldXDomain;
}
// fit y domain
const yDataDomainPadded = this.yDataDomainPadded;
if (yDataDomainPadded !== null && (this.scaledFlags & ScaledView.YFitFlag) !== 0) {
newYDomain = yDataDomainPadded;
this.setScaledFlags(this.scaledFlags | ScaledView.YInRangeMask);
} else {
newYDomain = oldYDomain;
}
// clamp x domain
const xDomainPadded = xDataDomainPadded !== null ? xDataDomainPadded : oldXDomain;
const xDomainBounds = this.xDomainBounds.value;
const xDomainMin = xDomainBounds[0] === false ? void 0
: xDomainBounds[0] === true ? xDomainPadded[0]
: xDomainBounds[0];
const xDomainMax = xDomainBounds[1] === false ? void 0
: xDomainBounds[1] === true ? xDomainPadded[1]
: xDomainBounds[1];
const xZoomBounds = this.xZoomBounds.value;
let xZoomMin: number | boolean | undefined = xZoomBounds[0];
let xZoomMax: number | boolean | undefined = xZoomBounds[1];
if (xZoomMin === true) {
if (oldXScale instanceof LinearScale) {
xZoomMin = ScaledView.LinearZoomMin;
} else if (oldXScale instanceof TimeScale) {
xZoomMin = ScaledView.TimeZoomMin;
} else {
xZoomMin = void 0;
}
} else if (xZoomMin === false) {
xZoomMin = void 0;
}
if (xZoomMax === true) {
if (oldXScale instanceof LinearScale) {
xZoomMax = ScaledView.LinearZoomMax;
} else if (oldXScale instanceof TimeScale) {
xZoomMax = ScaledView.TimeZoomMax;
} else {
xZoomMax = void 0;
}
} else if (xZoomMax === false) {
xZoomMax = void 0;
}
newXDomain = oldXScale.clampDomain(xDomainMin, xDomainMax, xZoomMin, xZoomMax).domain;
// clamp y domain
const yDomainPadded = yDataDomainPadded !== null ? yDataDomainPadded : oldYDomain;
const yDomainBounds = this.yDomainBounds.value;
const yDomainMin = yDomainBounds[0] === false ? void 0
: yDomainBounds[0] === true ? yDomainPadded[0]
: yDomainBounds[0];
const yDomainMax = yDomainBounds[1] === false ? void 0
: yDomainBounds[1] === true ? yDomainPadded[1]
: yDomainBounds[1];
const yZoomBounds = this.yZoomBounds.value;
let yZoomMin: number | boolean | undefined = yZoomBounds[0];
let yZoomMax: number | boolean | undefined = yZoomBounds[1];
if (yZoomMin === true) {
if (oldYScale instanceof LinearScale) {
yZoomMin = ScaledView.LinearZoomMin;
} else if (oldYScale instanceof TimeScale) {
yZoomMin = ScaledView.TimeZoomMin;
} else {
yZoomMin = void 0;
}
} else if (yZoomMin === false) {
yZoomMin = void 0;
}
if (yZoomMax === true) {
if (oldYScale instanceof LinearScale) {
yZoomMax = ScaledView.LinearZoomMax;
} else if (oldYScale instanceof TimeScale) {
yZoomMax = ScaledView.TimeZoomMax;
} else {
yZoomMax = void 0;
}
} else if (yZoomMax === false) {
yZoomMax = void 0;
}
newYDomain = oldYScale.clampDomain(yDomainMin, yDomainMax, yZoomMin, yZoomMax).domain;
// track x domain
if (xDataDomainPadded !== null && !isCoasting &&
(this.scaledFlags & ScaledView.XDomainTrackingFlag) !== 0 &&
(this.scaledFlags & ScaledView.XInRangeMask) !== 0) {
if ((this.scaledFlags & ScaledView.XInRangeMask) === ScaledView.XInRangeMask) {
newXDomain = xDataDomainPadded;
} else {
const xDomainWidth = +newXDomain[1] - +newXDomain[0] as unknown as X;
if ((this.scaledFlags & ScaledView.XMinInRangeFlag) !== 0) {
newXDomain = Domain(xDataDomainPadded[0], +xDataDomainPadded[0] + +xDomainWidth as unknown as X);
} else {
newXDomain = Domain(+xDataDomainPadded[1] - +xDomainWidth as unknown as X, xDataDomainPadded[1]);
}
}
}
// track y domain
if (yDataDomainPadded !== null && !isCoasting &&
(this.scaledFlags & ScaledView.YDomainTrackingFlag) !== 0 &&
(this.scaledFlags & ScaledView.YInRangeMask) !== 0) {
if ((this.scaledFlags & ScaledView.YInRangeMask) === ScaledView.YInRangeMask) {
newYDomain = yDataDomainPadded;
} else {
const yDomainWidth = +newYDomain[1] - +newYDomain[0] as unknown as Y;
if ((this.scaledFlags & ScaledView.YMinInRangeFlag) !== 0) {
newYDomain = Domain(yDataDomainPadded[0], +yDataDomainPadded[0] + +yDomainWidth as unknown as Y);
} else {
newYDomain = Domain(+yDataDomainPadded[1] - +yDomainWidth as unknown as Y, yDataDomainPadded[1]);
}
}
}
// fit aspect ratio
const fitAspectRatio = this.fitAspectRatio.value;
if (fitAspectRatio !== void 0 && (this.gesture.preserveAspectRatio || (this.scaledFlags & ScaledView.FitMask) !== 0)) {
const xRange = oldXScale.range;
const yRange = oldYScale.range;
const oldDomainWidth = +newXDomain[1] - +newXDomain[0];
const oldDomainHeight = +newYDomain[1] - +newYDomain[0];
const domainAspectRatio = oldDomainWidth / oldDomainHeight;
const rangeAspectRatio = (xRange[1] - xRange[0]) / (yRange[0] - yRange[1]);
const anamorphicAspectRatio = Math.abs(fitAspectRatio * rangeAspectRatio);
if (!Equivalent(domainAspectRatio, anamorphicAspectRatio)) {
const fitAlign = this.fitAlign.value;
if (fitAspectRatio < 0 && domainAspectRatio < anamorphicAspectRatio ||
fitAspectRatio > 0 && domainAspectRatio > anamorphicAspectRatio) {
const newDomainWidth = oldDomainHeight * anamorphicAspectRatio;
const dx = newDomainWidth - oldDomainWidth;
newXDomain = Domain(+newXDomain[0] - dx * fitAlign[0] as unknown as X,
+newXDomain[1] + dx * (1 - fitAlign[0]) as unknown as X);
} else {
const newDomainHeight = oldDomainWidth / anamorphicAspectRatio;
const dy = newDomainHeight - oldDomainHeight;
newYDomain = Domain(+newYDomain[0] - dy * fitAlign[1] as unknown as Y,
+newYDomain[1] + dy * (1 - fitAlign[1]) as unknown as Y);
}
}
}
// update x domain
if ((this.scaledFlags & ScaledView.XBoundingFlag) === 0 && !Equivalent(newXDomain, oldXDomain)) {
let timing: Timing | boolean | undefined;
if ((this.scaledFlags & (ScaledView.XFitFlag | ScaledView.RescaleFlag)) === 0 ||
(this.scaledFlags & ScaledView.XFitTweenFlag) !== 0) {
timing = (this.scaledFlags & ScaledView.InteractingMask) !== 0
? this.reboundTransition.value : this.rescaleTransition.value;
if (timing !== void 0 && timing !== false) {
this.setScaledFlags(this.scaledFlags | ScaledView.XBoundingFlag);
}
}
this.willReboundX(oldXScale);
this.xDomain(newXDomain, timing);
if (timing === void 0 && timing !== false) {
this.didReboundX(this.xScale.getState());
}
if (xDataDomainPadded !== null && (this.scaledFlags & ScaledView.XFitFlag) !== 0) {
this.setScaledFlags(this.scaledFlags & ~ScaledView.XFitFlag);
}
}
// update y domain
if ((this.scaledFlags & ScaledView.YBoundingFlag) === 0 && !Equivalent(newYDomain, oldYDomain)) {
let timing: Timing | boolean | undefined;
if ((this.scaledFlags & (ScaledView.YFitFlag | ScaledView.RescaleFlag)) === 0 ||
(this.scaledFlags & ScaledView.YFitTweenFlag) !== 0) {
timing = (this.scaledFlags & ScaledView.InteractingMask) !== 0
? this.reboundTransition.value : this.rescaleTransition.value;
if (timing !== void 0 && timing !== false) {
this.setScaledFlags(this.scaledFlags | ScaledView.YBoundingFlag);
}
}
this.willReboundY(oldYScale);
this.yDomain(newYDomain, timing);
if (timing === void 0 && timing !== false) {
this.didReboundY(this.yScale.getState());
}
if (yDataDomainPadded !== null && (this.scaledFlags & ScaledView.YFitFlag) !== 0) {
this.setScaledFlags(this.scaledFlags & ~ScaledView.YFitFlag);
}
}
this.setScaledFlags(this.scaledFlags & ~(ScaledView.InteractedFlag | ScaledView.RescaleFlag));
}
protected override displayChildren(displayFlags: ViewFlags, viewContext: ViewContextType<this>,
                                   displayChild: (this: this, childView: View, displayFlags: ViewFlags,
                                                  viewContext: ViewContextType<this>) => void): void {
  // Take the scale-aware layout path only during a layout pass with both
  // scales available; otherwise fall back to the default traversal.
  if ((displayFlags & View.NeedsLayout) !== 0) {
    const xScale = this.xScale.value;
    const yScale = this.yScale.value;
    if (xScale !== null && yScale !== null) {
      this.layoutChildViews(xScale, yScale, displayFlags, viewContext, displayChild);
      return;
    }
  }
  super.displayChildren(displayFlags, viewContext, displayChild);
}
/**
 * Lays out child views and, in the same traversal, recomputes the combined
 * x/y data domains and range paddings of all children that inherit this
 * view's scales.  The accumulated extrema are written back afterwards.
 */
protected layoutChildViews(xScale: ContinuousScale<X, number>,
                           yScale: ContinuousScale<Y, number>,
                           displayFlags: ViewFlags, viewContext: ViewContextType<this>,
                           displayChild: (this: this, childView: View, displayFlags: ViewFlags,
                                          viewContext: ViewContextType<this>) => void): void {
  // Recompute extrema when laying out child views.
  let xDataDomainMin: X | undefined;
  let xDataDomainMax: X | undefined;
  let yDataDomainMin: Y | undefined;
  let yDataDomainMax: Y | undefined;
  let xRangePaddingMin = 0;
  let xRangePaddingMax = 0;
  let yRangePaddingMin = 0;
  let yRangePaddingMax = 0;
  let xCount = 0;
  let yCount = 0;
  type self = this;
  // Wraps the caller-provided displayChild so each child is first laid out,
  // then folded into the running extrema.
  function layoutChildView(this: self, childView: View, displayFlags: ViewFlags,
                           viewContext: ViewContextType<self>): void {
    displayChild.call(this, childView, displayFlags, viewContext);
    // Only children that inherit the x scale contribute to the x extrema.
    if (ScaledXView.is<X>(childView) && childView.xScale.inherited) {
      const childXDataDomain = childView.xDataDomain;
      if (childXDataDomain !== null) {
        if (xCount !== 0) {
          // Widen the running [min, max] to include this child's domain.
          if (Values.compare(childXDataDomain[0], xDataDomainMin) < 0) {
            xDataDomainMin = childXDataDomain[0];
          }
          if (Values.compare(xDataDomainMax, childXDataDomain[1]) < 0) {
            xDataDomainMax = childXDataDomain[1];
          }
        } else {
          // First contributing child seeds the extrema.
          xDataDomainMin = childXDataDomain[0];
          xDataDomainMax = childXDataDomain[1];
        }
        const childXRangePadding = childView.xRangePadding();
        xRangePaddingMin = Math.max(childXRangePadding[0], xRangePaddingMin);
        xRangePaddingMax = Math.max(childXRangePadding[1], xRangePaddingMax);
        xCount += 1;
      }
    }
    // Same accumulation for the y axis.
    if (ScaledYView.is<Y>(childView) && childView.yScale.inherited) {
      const childYDataDomain = childView.yDataDomain;
      if (childYDataDomain !== null) {
        if (yCount !== 0) {
          if (Values.compare(childYDataDomain[0], yDataDomainMin) < 0) {
            yDataDomainMin = childYDataDomain[0];
          }
          if (Values.compare(yDataDomainMax, childYDataDomain[1]) < 0) {
            yDataDomainMax = childYDataDomain[1];
          }
        } else {
          yDataDomainMin = childYDataDomain[0];
          yDataDomainMax = childYDataDomain[1];
        }
        const childYRangePadding = childView.yRangePadding();
        yRangePaddingMin = Math.max(childYRangePadding[0], yRangePaddingMin);
        yRangePaddingMax = Math.max(childYRangePadding[1], yRangePaddingMax);
        yCount += 1;
      }
    }
  }
  super.displayChildren(displayFlags, viewContext, layoutChildView);
  // A zero count means no child contributed, so the data domain is cleared.
  this.setXDataDomain(xCount !== 0 ? Domain<X>(xDataDomainMin!, xDataDomainMax!) : null);
  this.setYDataDomain(yCount !== 0 ? Domain<Y>(yDataDomainMin!, yDataDomainMax!) : null);
  this.xRangePadding.setValue([xRangePaddingMin, xRangePaddingMax], Affinity.Intrinsic);
  this.yRangePadding.setValue([yRangePaddingMin, yRangePaddingMax], Affinity.Intrinsic);
}
/** Hook invoked when an animated rebound of the x scale begins. */
protected onBeginBoundingXScale(xScale: ContinuousScale<X, number>): void {
  // hook
}
/** Clears the x bounding state and reports completion when the rebound animation ends. */
protected onEndBoundingXScale(xScale: ContinuousScale<X, number>): void {
  this.setScaledFlags(this.scaledFlags & ~ScaledView.XBoundingFlag);
  this.didReboundX(xScale);
}
/** Clears the x bounding state when the rebound animation is interrupted. */
protected onInterruptBoundingXScale(xScale: ContinuousScale<X, number>): void {
  this.setScaledFlags(this.scaledFlags & ~ScaledView.XBoundingFlag);
  this.didReboundX(xScale);
}
/** Hook invoked when an animated rebound of the y scale begins. */
protected onBeginBoundingYScale(yScale: ContinuousScale<Y, number>): void {
  // hook
}
/** Clears the y bounding state and reports completion when the rebound animation ends. */
protected onEndBoundingYScale(yScale: ContinuousScale<Y, number>): void {
  this.setScaledFlags(this.scaledFlags & ~ScaledView.YBoundingFlag);
  this.didReboundY(yScale);
}
/** Clears the y bounding state when the rebound animation is interrupted. */
protected onInterruptBoundingYScale(yScale: ContinuousScale<Y, number>): void {
  this.setScaledFlags(this.scaledFlags & ~ScaledView.YBoundingFlag);
  this.didReboundY(yScale);
}
/** Called before the x domain rebounds; neutralizes in-flight x gesture input. */
protected willReboundX(xScale: ContinuousScale<X, number>): void {
  this.gesture.neutralizeX();
}
/** Hook invoked after the x domain has rebounded. */
protected didReboundX(xScale: ContinuousScale<X, number>): void {
  // hook
}
/** Called before the y domain rebounds; neutralizes in-flight y gesture input. */
protected willReboundY(yScale: ContinuousScale<Y, number>): void {
  this.gesture.neutralizeY();
}
/** Hook invoked after the y domain has rebounded. */
protected didReboundY(yScale: ContinuousScale<Y, number>): void {
  // hook
}
// Scale gesture (pan/zoom) bound to this view.  Each axis participates only
// when its ScaleGesturesFlag is enabled; interaction state is mirrored into
// scaledFlags so boundScales can distinguish user-driven changes.
@ScaleGesture<ScaledView, ScaledView, unknown, unknown>({
  self: true,
  getXScale(): ContinuousScale<unknown, number> | null {
    // Expose the x scale to the gesture only when x gestures are enabled.
    if ((this.owner.scaledFlags & ScaledView.XScaleGesturesFlag) !== 0) {
      return this.owner.xScale();
    } else {
      return null;
    }
  },
  setXScale(xScale: ContinuousScale<unknown, number> | null, timing?: AnyTiming | boolean): void {
    if ((this.owner.scaledFlags & ScaledView.XScaleGesturesFlag) !== 0) {
      this.owner.xScale(xScale, timing);
    }
  },
  getYScale(): ContinuousScale<unknown, number> | null {
    if ((this.owner.scaledFlags & ScaledView.YScaleGesturesFlag) !== 0) {
      return this.owner.yScale();
    } else {
      return null;
    }
  },
  setYScale(yScale: ContinuousScale<unknown, number> | null, timing?: AnyTiming | boolean): void {
    if ((this.owner.scaledFlags & ScaledView.YScaleGesturesFlag) !== 0) {
      this.owner.yScale(yScale, timing);
    }
  },
  willStartInteracting(): void {
    // Cancel any in-flight rebound animations and mark interaction start.
    this.owner.setScaledFlags(this.owner.scaledFlags & ~ScaledView.BoundingMask | ScaledView.InteractingFlag);
  },
  didStopInteracting(): void {
    // Record which data-domain extremes are visible after the interaction,
    // so domain tracking knows which edge to glue new data to.
    const xScale = this.owner.xScale.value;
    const xDataDomain = this.owner.xDataDomain;
    if (xScale !== null && xDataDomain !== null) {
      const xDomain = xScale.domain;
      if (xDomain.contains(xDataDomain[0])) {
        this.owner.setScaledFlags(this.owner.scaledFlags | ScaledView.XMinInRangeFlag);
      } else {
        this.owner.setScaledFlags(this.owner.scaledFlags & ~ScaledView.XMinInRangeFlag);
      }
      if (xDomain.contains(xDataDomain[1])) {
        this.owner.setScaledFlags(this.owner.scaledFlags | ScaledView.XMaxInRangeFlag);
      } else {
        this.owner.setScaledFlags(this.owner.scaledFlags & ~ScaledView.XMaxInRangeFlag);
      }
    }
    const yScale = this.owner.yScale.value;
    const yDataDomain = this.owner.yDataDomain;
    if (yScale !== null && yDataDomain !== null) {
      const yDomain = yScale.domain;
      if (yDomain.contains(yDataDomain[0])) {
        this.owner.setScaledFlags(this.owner.scaledFlags | ScaledView.YMinInRangeFlag);
      } else {
        this.owner.setScaledFlags(this.owner.scaledFlags & ~ScaledView.YMinInRangeFlag);
      }
      if (yDomain.contains(yDataDomain[1])) {
        this.owner.setScaledFlags(this.owner.scaledFlags | ScaledView.YMaxInRangeFlag);
      } else {
        this.owner.setScaledFlags(this.owner.scaledFlags & ~ScaledView.YMaxInRangeFlag);
      }
    }
    // Transition Interacting -> Interacted; Interacted is cleared by boundScales.
    this.owner.setScaledFlags(this.owner.scaledFlags & ~ScaledView.InteractingFlag | ScaledView.InteractedFlag);
  },
  didStopPressing(): void {
    this.owner.requireUpdate(View.NeedsLayout);
  },
  willBeginCoast(input: ScaleGestureInput, event: Event | null): boolean | void {
    // Zero out momentum on any axis whose gestures are disabled.
    if ((this.owner.scaledFlags & ScaledView.XScaleGesturesFlag) === 0) {
      input.disableX = true;
      input.vx = 0;
      input.ax = 0;
    }
    if ((this.owner.scaledFlags & ScaledView.YScaleGesturesFlag) === 0) {
      input.disableY = true;
      input.vy = 0;
      input.ay = 0;
    }
  },
})
readonly gesture!: ScaleGesture<this, ScaledView<X, Y>, X, Y>;
static readonly gesture: MemberFastenerClass<ScaledView, "gesture">;
/** Applies each optional init property through its corresponding setter. */
override init(init: ScaledViewInit<X, Y>): void {
  super.init(init);
  // Preserve the original application order: scales, bounds, padding,
  // fit configuration, tracking/gesture toggles, then transitions.
  if (init.xScale !== undefined) {
    this.xScale(init.xScale);
  }
  if (init.yScale !== undefined) {
    this.yScale(init.yScale);
  }
  if (init.xDomainBounds !== undefined) {
    this.xDomainBounds(init.xDomainBounds);
  }
  if (init.yDomainBounds !== undefined) {
    this.yDomainBounds(init.yDomainBounds);
  }
  if (init.xZoomBounds !== undefined) {
    this.xZoomBounds(init.xZoomBounds);
  }
  if (init.yZoomBounds !== undefined) {
    this.yZoomBounds(init.yZoomBounds);
  }
  if (init.xDomainPadding !== undefined) {
    this.xDomainPadding(init.xDomainPadding);
  }
  if (init.yDomainPadding !== undefined) {
    this.yDomainPadding(init.yDomainPadding);
  }
  if (init.xRangePadding !== undefined) {
    this.xRangePadding(init.xRangePadding);
  }
  if (init.yRangePadding !== undefined) {
    this.yRangePadding(init.yRangePadding);
  }
  if (init.fitAlign !== undefined) {
    this.fitAlign(init.fitAlign);
  }
  if (init.xFitAlign !== undefined) {
    this.xFitAlign(init.xFitAlign);
  }
  if (init.yFitAlign !== undefined) {
    this.yFitAlign(init.yFitAlign);
  }
  if (init.fitAspectRatio !== undefined) {
    this.fitAspectRatio(init.fitAspectRatio);
  }
  if (init.preserveAspectRatio !== undefined) {
    this.preserveAspectRatio(init.preserveAspectRatio);
  }
  if (init.domainTracking !== undefined) {
    this.domainTracking(init.domainTracking);
  }
  if (init.xDomainTracking !== undefined) {
    this.xDomainTracking(init.xDomainTracking);
  }
  if (init.yDomainTracking !== undefined) {
    this.yDomainTracking(init.yDomainTracking);
  }
  if (init.scaleGestures !== undefined) {
    this.scaleGestures(init.scaleGestures);
  }
  if (init.xScaleGestures !== undefined) {
    this.xScaleGestures(init.xScaleGestures);
  }
  if (init.yScaleGestures !== undefined) {
    this.yScaleGestures(init.yScaleGestures);
  }
  if (init.rescaleTransition !== undefined) {
    this.rescaleTransition.setValue(init.rescaleTransition);
  }
  if (init.reboundTransition !== undefined) {
    this.reboundTransition.setValue(init.reboundTransition);
  }
}
/** @internal */
static createScale<X, Y>(x0: X, x1: X, y0: Y | undefined, y1: Y | undefined): ContinuousScale<X, Y> {
let range: LinearRange;
if (typeof y0 === "number" && typeof y1 === "number") {
range = LinearRange(y0, y1);
} else {
range = LinearRange(0, 1);
}
if (typeof x0 === "number" && typeof x1 === "number") {
return LinearScale(LinearDomain(x0, x1), range) as unknown as ContinuousScale<X, Y>;
} else if (x0 instanceof DateTime && x1 instanceof DateTime) {
return TimeScale(TimeDomain(x0, x1), range) as unknown as ContinuousScale<X, Y>;
} else {
throw new TypeError(x0 + ", " + x1 + ", " + y0 + ", " + y1);
}
}
/**
 * Parses a scale descriptor string: "linear" yields a unit linear scale,
 * "time" yields a time scale over the last 24 hours, and "a...b" yields a
 * linear scale when both endpoints are numeric, otherwise a time scale
 * with the endpoints parsed as dates.
 * @internal
 */
static parseScale<X, Y>(string: string): ContinuousScale<X, Y> {
  if (string === "linear") {
    return LinearScale(LinearDomain(0, 1), LinearRange(0, 1)) as unknown as ContinuousScale<X, Y>;
  } else if (string === "time") {
    const d1 = DateTime.current();
    const d0 = d1.withDay(d1.day - 1);
    return TimeScale(TimeDomain(d0, d1), LinearRange(0, 1)) as unknown as ContinuousScale<X, Y>;
  } else {
    const domain = string.split("...");
    const x0 = +domain[0]!;
    const x1 = +domain[1]!;
    if (isFinite(x0) && isFinite(x1)) {
      return LinearScale(LinearDomain(x0, x1), LinearRange(0, 1)) as unknown as ContinuousScale<X, Y>;
    } else {
      const d0 = DateTime.parse(domain[0]!);
      const d1 = DateTime.parse(domain[1]!);
      return TimeScale(TimeDomain(d0, d1), LinearRange(0, 1)) as unknown as ContinuousScale<X, Y>;
    }
  }
  // An unreachable `throw new TypeError("" + string)` previously followed
  // here; every branch above returns (or throws from DateTime.parse), so
  // the dead statement has been removed.
}
/** Keep the x domain glued to the data domain as new data arrives. @internal */
static readonly XDomainTrackingFlag: ScaledFlags = 1 << 0;
/** Keep the y domain glued to the data domain as new data arrives. @internal */
static readonly YDomainTrackingFlag: ScaledFlags = 1 << 1;
/** Pan/zoom gestures may modify the x scale. @internal */
static readonly XScaleGesturesFlag: ScaledFlags = 1 << 2;
/** Pan/zoom gestures may modify the y scale. @internal */
static readonly YScaleGesturesFlag: ScaledFlags = 1 << 3;
/** The x data-domain minimum was visible when interaction stopped. @internal */
static readonly XMinInRangeFlag: ScaledFlags = 1 << 4;
/** The x data-domain maximum was visible when interaction stopped. @internal */
static readonly XMaxInRangeFlag: ScaledFlags = 1 << 5;
/** The y data-domain minimum was visible when interaction stopped. @internal */
static readonly YMinInRangeFlag: ScaledFlags = 1 << 6;
/** The y data-domain maximum was visible when interaction stopped. @internal */
static readonly YMaxInRangeFlag: ScaledFlags = 1 << 7;
/** A scale gesture is currently in progress. @internal */
static readonly InteractingFlag: ScaledFlags = 1 << 8;
/** A scale gesture just finished; cleared by boundScales. @internal */
static readonly InteractedFlag: ScaledFlags = 1 << 9;
/** An animated rebound of the x domain is in flight. @internal */
static readonly XBoundingFlag: ScaledFlags = 1 << 10;
/** An animated rebound of the y domain is in flight. @internal */
static readonly YBoundingFlag: ScaledFlags = 1 << 11;
/** The x scale should be (re)fit to the padded data domain. @internal */
static readonly XFitFlag: ScaledFlags = 1 << 12;
/** The y scale should be (re)fit to the padded data domain. @internal */
static readonly YFitFlag: ScaledFlags = 1 << 13;
/** Animate the x fit instead of applying it instantly. @internal */
static readonly XFitTweenFlag: ScaledFlags = 1 << 14;
/** Animate the y fit instead of applying it instantly. @internal */
static readonly YFitTweenFlag: ScaledFlags = 1 << 15;
/** A range change forced a rescale; cleared by boundScales. @internal */
static readonly RescaleFlag: ScaledFlags = 1 << 16;
/** @internal */
static readonly DomainTrackingMask: ScaledFlags = ScaledView.XDomainTrackingFlag
                                                | ScaledView.YDomainTrackingFlag;
/** @internal */
static readonly ScaleGesturesMask: ScaledFlags = ScaledView.XScaleGesturesFlag
                                               | ScaledView.YScaleGesturesFlag;
/** @internal */
static readonly XInRangeMask: ScaledFlags = ScaledView.XMinInRangeFlag
                                          | ScaledView.XMaxInRangeFlag;
/** @internal */
static readonly YInRangeMask: ScaledFlags = ScaledView.YMinInRangeFlag
                                          | ScaledView.YMaxInRangeFlag;
/** @internal */
static readonly InteractingMask: ScaledFlags = ScaledView.InteractingFlag
                                             | ScaledView.InteractedFlag;
/** @internal */
static readonly BoundingMask: ScaledFlags = ScaledView.XBoundingFlag
                                          | ScaledView.YBoundingFlag;
/** @internal */
static readonly FitMask: ScaledFlags = ScaledView.XFitFlag
                                     | ScaledView.YFitFlag;
/** @internal */
static readonly FitTweenMask: ScaledFlags = ScaledView.XFitTweenFlag
                                          | ScaledView.YFitTweenFlag;
/** Default zoom limit for linear scales; passed to clampDomain when a zoom bound is `true`. NOTE(review): Min > Max numerically — presumably these bound the domain span in opposite directions; confirm against clampDomain. @internal */
static LinearZoomMin: number = 1000000;
/** Default zoom limit for linear scales (see LinearZoomMin). @internal */
static LinearZoomMax: number = 0.001;
/** Default zoom limit for time scales, in milliseconds (one day). @internal */
static TimeZoomMin: number = 86400000;
/** Default zoom limit for time scales (see TimeZoomMin). @internal */
static TimeZoomMax: number = 1;
static override readonly InsertChildFlags: ViewFlags = GraphicsView.InsertChildFlags | View.NeedsResize;
} | the_stack |
import * as net from "net";
import * as dgram from "dgram";
import { parse as urlParse } from "url";
import { EventEmitter } from "events";
import {
parseRTPPacket,
parseRTCPPacket,
getMD5Hash,
parseTransport,
generateSSRC,
} from "./util";
import * as transform from "sdp-transform";
const RTP_AVP = "RTP/AVP";
const STATUS_OK = 200;
const STATUS_UNAUTH = 401;
const WWW_AUTH = "WWW-Authenticate";
// Matches `key=value` / `key="value"` pairs in a WWW-Authenticate header.
// BUGFIX: the pattern was built from a plain string literal containing
// single-backslash `\s`, which JavaScript string parsing reduces to `s`
// (the escape is dropped), so matching broke whenever whitespace appeared
// around `=`.  Backslashes must be doubled in a string-built RegExp.
const WWW_AUTH_REGEX = new RegExp('([a-zA-Z]+)\\s*=\\s*"?((?<=").*?(?=")|.*?(?=,?\\s*[a-zA-Z]+\\s*\\=)|.+[^=])', "g");
// States of the incremental stream parser in #_onData: the TCP stream
// interleaves text RTSP messages with binary RTP/RTCP packets (each
// prefixed by a '$' byte, channel, and 16-bit length).
enum ReadStates {
  SEARCHING,
  READING_RTSP_HEADER,
  READING_RTSP_PAYLOAD,
  READING_RAW_PACKET_SIZE,
  READING_RAW_PACKET,
}
// Transport used for RTP delivery: a UDP socket pair, or packets
// interleaved into the TCP RTSP connection.
type Connection = "udp" | "tcp";
// Parsed RTSP header map; well-known headers are typed explicitly,
// everything else falls through the index signature.
type Headers = {
  [key: string]: string | number | undefined;
  Session?: string;
  Location?: string;
  CSeq?: number;
  "WWW-Authenticate"?: string;
  Transport?: string;
  Unsupported?: string;
};
// RTSP client speaking RTSP/1.0 over TCP, with RTP/RTCP delivered over
// UDP or interleaved TCP.  Emits "response", "data", "controlData", and
// "log" events.
export default class RTSPClient extends EventEmitter {
  username: string;
  password: string;
  headers: { [key: string]: string };
  isConnected: boolean = false;
  // These are all set in #connect or #_netConnect.
  _url?: string;
  _client?: net.Socket;
  _cSeq: number = 0;
  _unsupportedExtensions?: string[];
  // Example: 'SessionId'[';timeout=seconds']
  _session?: string;
  _keepAliveID?: any;
  _nextFreeInterleavedChannel: number = 0;
  _nextFreeUDPPort: number = 5000;
  readState: ReadStates = ReadStates.SEARCHING;
  // Used as a cache for the data stream.
  // What's in here is based on current #readState.
  messageBytes: number[] = [];
  // Used for parsing RTSP responses,
  // Content-Length header in the RTSP message.
  rtspContentLength: number = 0;
  rtspStatusLine: string = "";
  rtspHeaders: Headers = {};
  // Used for parsing RTP/RTCP responses.
  rtspPacketLength: number = 0;
  // Buffer.alloc(0) replaces the deprecated `new Buffer("")` (DEP0005);
  // both produce an empty zero-length Buffer.
  rtspPacket: Buffer = Buffer.alloc(0);
  rtspPacketPointer: number = 0;
  // Used in #_emptyReceiverReport.
  clientSSRC = generateSSRC();
/**
 * Creates an RTSP client with the given credentials.  Extra request
 * headers may be supplied; the default User-Agent always takes
 * precedence over a caller-supplied one.
 */
constructor(
  username: string,
  password: string,
  headers?: { [key: string]: string }
) {
  super();
  const defaultHeaders: { [key: string]: string } = {
    "User-Agent": "yellowstone/3.x",
  };
  this.username = username;
  this.password = password;
  this.headers = Object.assign({}, headers ?? {}, defaultHeaders);
}
// This manages the lifecycle for the RTSP connection
// over TCP.
//
// Sets #_client.
//
// Handles receiving data & closing port, called during
// #connect.
//
// Resolves once the TCP connection is established; rejects only on a
// pre-connect error (the error listener is removed after connecting).
// NOTE(review): post-connect socket errors are not handled here —
// presumably they surface via the "close" event; confirm.
_netConnect(hostname: string, port: number) {
  return new Promise((resolve, reject) => {
    // Set after listeners defined.
    let client: net.Socket;
    const errorListener = (err: any) => {
      client.removeListener("error", errorListener);
      reject(err);
    };
    const closeListener = () => {
      client.removeListener("close", closeListener);
      // Immediate close: skip TEARDOWN since the socket is already gone.
      this.close(true);
    };
    // Answers server-initiated REDIRECT/ANNOUNCE requests; a REDIRECT
    // with a Location header triggers a reconnect to the new URL.
    const responseListener = (responseName: string, headers: Headers) => {
      const name = responseName.split(" ")[0];
      // Ignore ordinary responses (status lines starting "RTSP/...").
      if (name.indexOf("RTSP/") === 0) {
        return;
      }
      if (name === "REDIRECT" || name === "ANNOUNCE") {
        this.respond("200 OK", { CSeq: headers.CSeq });
      }
      if (name === "REDIRECT" && headers.Location) {
        this.close();
        this.connect(headers.Location);
      }
    };
    client = net.connect(port, hostname, () => {
      this.isConnected = true;
      this._client = client;
      client.removeListener("error", errorListener);
      this.on("response", responseListener);
      resolve(this);
    });
    client.on("data", this._onData.bind(this));
    client.on("error", errorListener);
    client.on("close", closeListener);
  });
}
/**
 * Connects to an RTSP URL, performs OPTIONS + DESCRIBE, and issues a
 * SETUP for at most one H264 video, one AAC audio, and one ONVIF
 * metadata stream found in the SDP.  Returns an array of per-stream
 * details ({ codec, mediaSource, transport, isH264, rtpChannel,
 * rtcpChannel }).
 */
async connect(
  url: string,
  {
    keepAlive = true,
    connection = "udp",
  }: { keepAlive: boolean; connection?: Connection } = {
    keepAlive: true,
    connection: "udp",
  }
) {
  const { hostname, port } = urlParse((this._url = url));
  if (!hostname) {
    throw new Error("URL parsing error in connect method.");
  }
  let details: any = [];
  // 554 is the default RTSP port.
  await this._netConnect(hostname, parseInt(port || "554"));
  await this.request("OPTIONS");
  const describeRes = await this.request("DESCRIBE", {
    Accept: "application/sdp",
  });
  if (!describeRes || !describeRes.mediaHeaders) {
    throw new Error(
      "No media headers on DESCRIBE; RTSP server is broken (sanity check)"
    );
  }
  // For now, only RTP/AVP is supported.
  const { media } = transform.parse(describeRes.mediaHeaders.join("\r\n"));
  // Loop over the Media Streams in the SDP looking for Video or Audio
  // In theory the SDP can contain multiple Video and Audio Streams. We only want one of each type
  let hasVideo = false;
  let hasAudio = false;
  let hasMetaData = false;
  for (let x = 0; x < media.length; x++) {
    let needSetup = false;
    let codec = "";
    let mediaSource = media[x];
    if (
      mediaSource.type === "video" &&
      mediaSource.protocol === RTP_AVP &&
      // @ts-ignore
      mediaSource.rtp[0].codec === "H264"
    ) {
      this.emit("log", "H264 Video Stream Found in SDP", "");
      if (hasVideo == false) {
        needSetup = true;
        hasVideo = true;
        codec = "H264";
      }
    }
    if (
      mediaSource.type === "audio" &&
      mediaSource.protocol === RTP_AVP &&
      // @ts-ignore
      mediaSource.rtp[0].codec === "mpeg4-generic" &&
      // @ts-ignore
      mediaSource.fmtp[0].config.includes("AAC")
    ) {
      this.emit("log", "AAC Audio Stream Found in SDP", "");
      if (hasAudio == false) {
        needSetup = true;
        hasAudio = true;
        codec = "AAC";
      }
    }
    if (
      mediaSource.type === "application" &&
      mediaSource.protocol === RTP_AVP &&
      // @ts-ignore
      mediaSource.rtp[0].codec.toLowerCase() === "vnd.onvif.metadata"
    ) {
      this.emit("log", "ONVIF Meta Data Found in SDP", "");
      if (hasMetaData == false) {
        needSetup = true;
        hasMetaData = true;
        codec = "vnd.onvif.metadata";
      }
    }
    if (needSetup) {
      let streamurl = "";
      // The 'control' in the SDP can be a relative or absolute uri
      if (mediaSource.control) {
        if (mediaSource.control.toLowerCase().startsWith("rtsp://")) {
          // absolute path
          streamurl = mediaSource.control;
        } else {
          // relative path
          streamurl = this._url + "/" + mediaSource.control;
        }
      }
      // Perform a SETUP on the streamurl
      // either 'udp' RTP/RTCP packets
      // or with 'tcp' RTP/TCP packets which are interleaved into the TCP based RTSP socket
      let setupRes;
      let rtpChannel;
      let rtcpChannel;
      if (connection === "udp") {
        // Create a pair of UDP listeners, even numbered port for RTP
        // and odd numbered port for RTCP
        rtpChannel = this._nextFreeUDPPort;
        rtcpChannel = this._nextFreeUDPPort + 1;
        this._nextFreeUDPPort += 2;
        const rtpPort = rtpChannel;
        const rtpReceiver = dgram.createSocket("udp4");
        rtpReceiver.on("message", (buf, remote) => {
          const packet = parseRTPPacket(buf);
          this.emit("data", rtpPort, packet.payload, packet);
        });
        const rtcpPort = rtcpChannel;
        const rtcpReceiver = dgram.createSocket("udp4");
        rtcpReceiver.on("message", (buf, remote) => {
          const packet = parseRTCPPacket(buf);
          this.emit("controlData", rtcpPort, packet);
          // Reply with an empty receiver report to keep the RTCP
          // session alive.
          const receiver_report = this._emptyReceiverReport();
          this._sendUDPData(remote.address, remote.port, receiver_report);
        });
        // Block until both UDP sockets are open.
        await new Promise((resolve) => {
          rtpReceiver.bind(rtpPort, () => resolve({}));
        });
        await new Promise((resolve) => {
          rtcpReceiver.bind(rtcpPort, () => resolve({}));
        });
        let setupHeader = {
          Transport: `RTP/AVP;unicast;client_port=${rtpPort}-${rtcpPort}`,
        };
        if (this._session)
          Object.assign(setupHeader, { Session: this._session });
        setupRes = await this.request("SETUP", setupHeader, streamurl);
      } else if (connection === "tcp") {
        // channel 0, RTP
        // channel 1, RTCP
        rtpChannel = this._nextFreeInterleavedChannel;
        rtcpChannel = this._nextFreeInterleavedChannel + 1;
        this._nextFreeInterleavedChannel += 2;
        let setupHeader = {
          Transport: `RTP/AVP/TCP;interleaved=${rtpChannel}-${rtcpChannel}`,
        };
        if (this._session)
          Object.assign(setupHeader, { Session: this._session }); // not used on first SETUP
        setupRes = await this.request("SETUP", setupHeader, streamurl);
      } else {
        throw new Error(
          `Connection parameter to RTSPClient#connect is ${connection}, not udp or tcp!`
        );
      }
      if (!setupRes) {
        throw new Error(
          "No SETUP response; RTSP server is broken (sanity check)"
        );
      }
      const { headers } = setupRes;
      if (!headers.Transport) {
        throw new Error(
          "No Transport header on SETUP; RTSP server is broken (sanity check)"
        );
      }
      const transport = parseTransport(headers.Transport);
      if (
        transport.protocol !== "RTP/AVP/TCP" &&
        transport.protocol !== "RTP/AVP"
      ) {
        throw new Error(
          "Only RTSP servers supporting RTP/AVP(unicast) or RTP/ACP/TCP are supported at this time."
        );
      }
      if (headers.Unsupported) {
        this._unsupportedExtensions = headers.Unsupported.split(",");
      }
      if (headers.Session) {
        // Session header may carry ';timeout=seconds'; keep only the id.
        this._session = headers.Session.split(";")[0];
      }
      let detail = {
        codec,
        mediaSource,
        transport: transport.parameters,
        isH264: codec === "H264",
        rtpChannel,
        rtcpChannel,
      };
      details.push(detail);
    } // end if (needSetup)
  } // end for loop, looping over each media stream
  if (keepAlive) {
    // Start a Timer to send OPTIONS every 20 seconds to keep stream alive
    // using the Session ID
    this._keepAliveID = setInterval(() => {
      this.request("OPTIONS", { Session: this._session });
      // this.request("OPTIONS");
    }, 20 * 1000);
  }
  return details;
}
/**
 * Sends an RTSP request with an auto-incremented CSeq and resolves with
 * the parsed response headers (plus the SDP/media lines when present).
 * A 401 response triggers one retry with Basic or Digest credentials
 * derived from #username/#password.
 */
request(
  requestName: string,
  headersParam: Headers = {},
  url?: string
): Promise<{ headers: Headers; mediaHeaders?: string[] } | void> {
  if (!this._client) {
    return Promise.resolve();
  }
  const id = ++this._cSeq;
  // mutable via string addition
  let req = `${requestName} ${url || this._url} RTSP/1.0\r\nCSeq: ${id}\r\n`;
  const headers = {
    ...this.headers,
    ...headersParam,
  };
  req += Object.entries(headers)
    .map(([key, value]) => `${key}: ${value}\r\n`)
    .join("");
  this.emit("log", req, "C->S");
  // Make sure to add an empty line after the request.
  this._client.write(`${req}\r\n`);
  return new Promise((resolve, reject) => {
    const responseHandler = (
      responseName: string,
      resHeaders: Headers,
      mediaHeaders: string[]
    ) => {
      const firstAnswer: string = String(resHeaders[""]) || "";
      // id > 2 means credentials have already been attached on a retry,
      // so a further 401 is a hard authentication failure.
      if (firstAnswer.indexOf("401") >= 0 && id > 2) {
        // BUGFIX: detach the handler before rejecting; previously it
        // leaked and could fire (double-settling the promise) on every
        // subsequent response.
        this.removeListener("response", responseHandler);
        reject(new Error(`Bad RTSP credentials!`));
        return;
      }
      // Responses for other in-flight requests are ignored.
      if (resHeaders.CSeq !== id) {
        return;
      }
      this.removeListener("response", responseHandler);
      const statusCode = parseInt(responseName.split(" ")[1]);
      if (statusCode === STATUS_OK) {
        if (!!mediaHeaders.length) {
          resolve({
            headers: resHeaders,
            mediaHeaders,
          });
        } else {
          resolve({
            headers: resHeaders,
          });
        }
      } else {
        const authHeader = resHeaders[WWW_AUTH];
        // We have status code unauthenticated.
        if (statusCode === STATUS_UNAUTH && authHeader) {
          const type = authHeader.split(" ")[0];
          // Get auth properties from WWW_AUTH header.
          let realm: string = "";
          let nonce: string = "";
          let match = WWW_AUTH_REGEX.exec(authHeader);
          while (match != null) {
            const prop = match[1];
            if (prop == "realm" && match[2]) {
              realm = match[2];
            }
            if (prop == "nonce" && match[2]) {
              nonce = match[2];
            }
            match = WWW_AUTH_REGEX.exec(authHeader);
          }
          // mutable, corresponds to Authorization header
          let authString = "";
          if (type === "Digest") {
            // Digest Authentication (no qop/cnonce handling).
            const ha1 = getMD5Hash(
              `${this.username}:${realm}:${this.password}`
            );
            const ha2 = getMD5Hash(`${requestName}:${this._url}`);
            const ha3 = getMD5Hash(`${ha1}:${nonce}:${ha2}`);
            authString = `Digest username="${this.username}",realm="${realm}",nonce="${nonce}",uri="${this._url}",response="${ha3}"`;
          } else if (type === "Basic") {
            // Basic Authentication
            // https://xkcd.com/538/
            // Buffer.from replaces the deprecated new Buffer() (DEP0005).
            const b64 = Buffer.from(
              `${this.username}:${this.password}`
            ).toString("base64");
            authString = `Basic ${b64}`;
          }
          Object.assign(headers, {
            Authorization: authString,
          });
          // Retry the request once with credentials attached.
          resolve(this.request(requestName, headers, url));
          return;
        }
        reject(new Error(`Bad RTSP status code ${statusCode}!`));
        return;
      }
    };
    this.on("response", responseHandler);
  });
}
/**
 * Writes an RTSP response (e.g. to a server-initiated REDIRECT or
 * ANNOUNCE) with the client's default headers merged in.
 */
respond(status: string, headersParam: Headers = {}) {
  const client = this._client;
  if (client === undefined) {
    return;
  }
  const merged: Headers = Object.assign({}, this.headers, headersParam);
  const lines = [`RTSP/1.0 ${status}`];
  for (const [name, value] of Object.entries(merged)) {
    lines.push(`${name}: ${value}`);
  }
  const res = lines.join("\r\n") + "\r\n";
  this.emit("log", res, "C->S");
  // The extra CRLF terminates the header section.
  client.write(`${res}\r\n`);
}
async play() {
if (!this.isConnected) {
throw new Error("Client is not connected.");
}
await this.request("PLAY", { Session: this._session });
return this;
}
async pause() {
if (!this.isConnected) {
throw new Error("Client is not connected.");
}
await this.request("PAUSE", { Session: this._session });
return this;
}
async close(isImmediate: boolean = false) {
if (!this._client) {
return this;
}
if (!isImmediate) {
await this.request("TEARDOWN", {
Session: this._session,
});
}
this._client.end();
this.removeAllListeners("response");
if (this._keepAliveID) {
clearInterval(this._keepAliveID);
this._keepAliveID = 0;
}
this.isConnected = false;
this._cSeq = 0;
return this;
}
_onData(data: Buffer) {
let index = 0;
// $
const PACKET_START = 0x24;
// R
const RTSP_HEADER_START = 0x52;
// /n
const ENDL = 10;
while (index < data.length) {
// read RTP or RTCP packet
if (
this.readState == ReadStates.SEARCHING &&
data[index] == PACKET_START
) {
this.messageBytes = [data[index]];
index++;
this.readState = ReadStates.READING_RAW_PACKET_SIZE;
} else if (this.readState == ReadStates.READING_RAW_PACKET_SIZE) {
// accumulate bytes for $, channel and length
this.messageBytes.push(data[index]);
index++;
if (this.messageBytes.length == 4) {
this.rtspPacketLength =
(this.messageBytes[2] << 8) + this.messageBytes[3];
if (this.rtspPacketLength > 0) {
this.rtspPacket = new Buffer(this.rtspPacketLength);
this.rtspPacketPointer = 0;
this.readState = ReadStates.READING_RAW_PACKET;
} else {
this.readState = ReadStates.SEARCHING;
}
}
} else if (this.readState == ReadStates.READING_RAW_PACKET) {
this.rtspPacket[this.rtspPacketPointer++] = data[index];
index++;
if (this.rtspPacketPointer == this.rtspPacketLength) {
const packetChannel = this.messageBytes[1];
if ((packetChannel & 0x01) === 0) {
// even number
const packet = parseRTPPacket(this.rtspPacket);
this.emit("data", packetChannel, packet.payload, packet);
}
if ((packetChannel & 0x01) === 1) {
// odd number
const packet = parseRTCPPacket(this.rtspPacket);
this.emit("controlData", packetChannel, packet);
const receiver_report = this._emptyReceiverReport();
this._sendInterleavedData(packetChannel, receiver_report);
}
this.readState = ReadStates.SEARCHING;
}
// read response data
} else if (
this.readState == ReadStates.SEARCHING &&
data[index] == RTSP_HEADER_START
) {
// found the start of a RTSP rtsp_message
this.messageBytes = [data[index]];
index++;
this.readState = ReadStates.READING_RTSP_HEADER;
} else if (this.readState == ReadStates.READING_RTSP_HEADER) {
// Reading a RTSP message.
// Add character to the messageBytes
// Ignore /r (13) but keep /n (10)
if (data[index] != 13) {
this.messageBytes.push(data[index]);
}
index++;
// if we have two new lines back to back then we have a complete RTSP command,
// note we may still need to read the Content Payload (the body) e.g. the SDP
if (
this.messageBytes.length >= 2 &&
this.messageBytes[this.messageBytes.length - 2] == ENDL &&
this.messageBytes[this.messageBytes.length - 1] == ENDL
) {
// Parse the Header
const text = String.fromCharCode.apply(null, this.messageBytes);
const lines = text.split("\n");
this.rtspContentLength = 0;
this.rtspStatusLine = lines[0];
this.rtspHeaders = {};
lines.forEach((line) => {
const indexOf = line.indexOf(":");
if (indexOf !== line.length - 1) {
const key = line.substring(0, indexOf).trim();
const data = line.substring(indexOf + 1).trim();
this.rtspHeaders[key] =
key != "Session" && data.match(/^[0-9]+$/)
? parseInt(data, 10)
: data;
// workaround for buggy Hipcam RealServer/V1.0 camera which returns Content-length and not Content-Length
if (key.toLowerCase() == "content-length") {
this.rtspContentLength = parseInt(data, 10);
}
}
});
// if no content length, there there's no media headers
// emit the message
if (!this.rtspContentLength) {
this.emit("log", text, "S->C");
this.emit("response", this.rtspStatusLine, this.rtspHeaders, []);
this.readState = ReadStates.SEARCHING;
} else {
this.messageBytes = [];
this.readState = ReadStates.READING_RTSP_PAYLOAD;
}
}
} else if (
this.readState == ReadStates.READING_RTSP_PAYLOAD &&
this.messageBytes.length < this.rtspContentLength
) {
// Copy data into the RTSP payload
this.messageBytes.push(data[index]);
index++;
if (this.messageBytes.length == this.rtspContentLength) {
const text = String.fromCharCode.apply(null, this.messageBytes);
const mediaHeaders = text.split("\n");
// Emit the RTSP message
this.emit(
"log",
String.fromCharCode.apply(null, this.messageBytes) + text,
"S->C"
);
this.emit(
"response",
this.rtspStatusLine,
this.rtspHeaders,
mediaHeaders
);
this.readState = ReadStates.SEARCHING;
}
} else {
// unexpected data
throw new Error(
"Bug in RTSP data framing, please file an issue with the author with stacktrace."
);
}
} // end while
}
_sendInterleavedData(channel: number, buffer: Buffer) {
if (!this._client) {
return;
}
const req = `${buffer.length} bytes of interleaved data on channel ${channel}`;
this.emit("log", req, "C->S");
const header = new Buffer(4);
header[0] = 0x24; // ascii $
header[1] = channel;
header[2] = (buffer.length >> 8) & 0xff;
header[3] = (buffer.length >> 0) & 0xff;
const data = Buffer.concat([header, buffer]);
this._client.write(data);
}
_sendUDPData(host: string, port: number, buffer: Buffer) {
var udp = dgram.createSocket("udp4");
udp.send(buffer, 0, buffer.length, port, host, (err, bytes) => {
// TODO: Don't ignore errors.
udp.close();
});
}
_emptyReceiverReport(): Buffer {
const report = new Buffer(8);
const version = 2;
const paddingBit = 0;
const reportCount = 0; // an empty report
const packetType = 201; // Receiver Report
const length = report.length / 4 - 1; // num 32 bit words minus 1
report[0] = (version << 6) + (paddingBit << 5) + reportCount;
report[1] = packetType;
report[2] = (length >> 8) & 0xff;
report[3] = (length >> 0) & 0xff;
report[4] = (this.clientSSRC >> 24) & 0xff;
report[5] = (this.clientSSRC >> 16) & 0xff;
report[6] = (this.clientSSRC >> 8) & 0xff;
report[7] = (this.clientSSRC >> 0) & 0xff;
return report;
}
}
export { RTPPacket, RTCPPacket } from "./util"; | the_stack |
import {
addType,
emptyObject,
flatten,
indexAccess,
isMember,
isOptional,
isType,
isTypeIncluded,
ReflectionKind,
Type,
TypeAny,
TypeInfer,
TypeLiteral,
TypeMethod,
TypeMethodSignature,
TypeNumber,
TypeParameter,
TypeString,
TypeTemplateLiteral,
TypeTuple,
TypeUnion
} from './type.js';
import { isPrototypeOfBase } from '@deepkit/core';
import { typeInfer } from './processor.js';
// Anything that may appear on either side of an `extends` check: an already
// reflected Type, or a raw JS value that will be converted via typeInfer().
type AssignableType = Type | string | boolean | number | symbol | bigint | undefined | null;

// One in-progress `left extends right` comparison; used for cycle detection
// so recursive/self-referencing types don't loop forever.
type StackEntry = {
    left: Type,
    right: Type,
}
/**
 * Returns true when the exact (left, right) pair — compared by identity —
 * is already being checked somewhere up the call stack.
 */
function hasStack(extendStack: StackEntry[], left: Type, right: Type): boolean {
    return extendStack.some(entry => entry.left === left && entry.right === right);
}
/**
 * The check of `extends` in Typescript. This function can be read as `left extends right`.
 *
 * See https://www.typescriptlang.org/docs/handbook/type-compatibility.html#any-unknown-object-void-undefined-null-and-never-assignability
 * This algo follows strict mode.
 *
 * Side effects: `infer` placeholders encountered on either side are resolved
 * (via .set()) as part of the check.
 */
export function isExtendable(leftValue: AssignableType, rightValue: AssignableType, extendStack: StackEntry[] = []): boolean {
    //raw JS values (string/number/boolean/...) are first reflected into Types
    const right: Type = isType(rightValue) ? rightValue : typeInfer(rightValue);
    const left: Type = isType(leftValue) ? leftValue : typeInfer(leftValue);
    //cycle detection: an already in-progress pair is assumed assignable
    if (hasStack(extendStack, left, right)) return true;
    try {
        extendStack.push({ left, right });
        if (right === left) return true;
        //`infer` placeholders capture whatever they are compared against
        if (left.kind === ReflectionKind.infer) {
            left.set(right as Type);
            return true;
        }
        if (right.kind === ReflectionKind.infer) {
            right.set(left as Type);
            return true;
        }
        if (right.kind === ReflectionKind.any || right.kind === ReflectionKind.unknown) return true;
        //fix: forward extendStack so cycle detection survives the recursion
        //(was the only recursive call that dropped it)
        if (left.kind === ReflectionKind.promise && right.kind === ReflectionKind.promise) return isExtendable(left.type, right.type, extendStack);
        if (right.kind !== ReflectionKind.union) {
            if (left.kind === ReflectionKind.null) {
                return right.kind === ReflectionKind.null;
            }
            if (left.kind === ReflectionKind.undefined) {
                return right.kind === ReflectionKind.void || right.kind === ReflectionKind.undefined;
            }
            if (left.kind === ReflectionKind.void) {
                return right.kind === ReflectionKind.void;
            }
            if (left.kind === ReflectionKind.any) {
                return right.kind !== ReflectionKind.never;
            }
            //`object` and the empty object literal `{}` are mutually assignable
            //with empty classes/object literals
            if (left.kind === ReflectionKind.object) {
                return right.kind === ReflectionKind.object
                    || (right.kind === ReflectionKind.objectLiteral && right.types.length === 0)
                    || (right.kind === ReflectionKind.class && right.types.length === 0);
            }
            if (left.kind === ReflectionKind.objectLiteral && left.types.length === 0) {
                return right.kind === ReflectionKind.object
                    || (right.kind === ReflectionKind.objectLiteral && right.types.length === 0)
                    || (right.kind === ReflectionKind.class && right.types.length === 0);
            }
        }
        //never is assignable to everything; nothing is assignable to never
        if (left.kind === ReflectionKind.never) return true;
        if (right.kind === ReflectionKind.never) return false;
        //identical primitives
        if (left.kind === ReflectionKind.literal && right.kind === ReflectionKind.literal) return left.literal === right.literal;
        if (left.kind === ReflectionKind.string && right.kind === ReflectionKind.string) return true;
        if (left.kind === ReflectionKind.number && right.kind === ReflectionKind.number) return true;
        if (left.kind === ReflectionKind.boolean && right.kind === ReflectionKind.boolean) return true;
        if (left.kind === ReflectionKind.bigint && right.kind === ReflectionKind.bigint) return true;
        if (left.kind === ReflectionKind.symbol && right.kind === ReflectionKind.symbol) return true;
        if (left.kind === ReflectionKind.regexp && right.kind === ReflectionKind.regexp) return true;
        //enums are compared structurally by their value lists
        if (left.kind === ReflectionKind.enum) {
            if (right.kind === ReflectionKind.enum) {
                if (left.values.length !== right.values.length) return false;
                for (let i = 0; i < right.values.length; i++) {
                    if (left.values[i] !== right.values[i]) return false;
                }
                return true;
            }
            return false;
        }
        //a literal is assignable to its widened primitive type
        if (left.kind === ReflectionKind.literal) {
            if ('string' === typeof left.literal && right.kind === ReflectionKind.string) return true;
            if ('number' === typeof left.literal && right.kind === ReflectionKind.number) return true;
            if ('boolean' === typeof left.literal && right.kind === ReflectionKind.boolean) return true;
            if ('bigint' === typeof left.literal && right.kind === ReflectionKind.bigint) return true;
            if ('symbol' === typeof left.literal && right.kind === ReflectionKind.symbol) return true;
            if ('string' === typeof left.literal && right.kind === ReflectionKind.templateLiteral) {
                return extendTemplateLiteral(left, right);
            }
        }
        if (left.kind === ReflectionKind.templateLiteral) {
            if (emptyObject(right)) return true;
            if (right.kind === ReflectionKind.string) return true;
            if (right.kind === ReflectionKind.literal) {
                if (right.literal === '') return false;
                return extendTemplateLiteral(left, { kind: ReflectionKind.templateLiteral, types: [right] });
            }
            if (right.kind === ReflectionKind.templateLiteral) {
                return extendTemplateLiteral(left, right);
            }
        }
        //the very same function reference is trivially assignable to itself
        if (left.kind === ReflectionKind.function && right.kind === ReflectionKind.function && left.function && left.function === right.function) return true;
        if ((left.kind === ReflectionKind.function || left.kind === ReflectionKind.method || left.kind === ReflectionKind.methodSignature) &&
            (right.kind === ReflectionKind.function || right.kind === ReflectionKind.method || right.kind === ReflectionKind.methodSignature || right.kind === ReflectionKind.objectLiteral)
        ) {
            if (right.kind === ReflectionKind.objectLiteral) {
                //todo: members maybe contain a call signature
                return true;
            }
            if (right.kind === ReflectionKind.function || right.kind === ReflectionKind.methodSignature || right.kind === ReflectionKind.method) {
                //return types are covariant, parameters contravariant
                const returnValid = isExtendable(left.return, right.return, extendStack);
                if (!returnValid) return false;
                return isFunctionParameterExtendable(left, right, extendStack);
            }
            return false;
        }
        if ((left.kind === ReflectionKind.propertySignature || left.kind === ReflectionKind.property) && (right.kind === ReflectionKind.propertySignature || right.kind === ReflectionKind.property)) {
            return isExtendable(left.type, right.type, extendStack);
        }
        //construct signature check: `X extends new (...) => Y`
        if ((left.kind === ReflectionKind.class || left.kind === ReflectionKind.objectLiteral) && right.kind === ReflectionKind.function && right.name === 'new') {
            const leftConstructor = (left.types as Type[]).find(v => (v.kind === ReflectionKind.method && v.name === 'constructor') || (v.kind === ReflectionKind.methodSignature && v.name === 'new'));
            const valid = isExtendable(right, leftConstructor || { kind: ReflectionKind.function, parameters: [], return: { kind: ReflectionKind.any } }, extendStack);
            return valid;
        }
        if ((left.kind === ReflectionKind.class || left.kind === ReflectionKind.objectLiteral) && (right.kind === ReflectionKind.object || (right.kind === ReflectionKind.objectLiteral && right.types.length === 0))) {
            return true;
        }
        if ((left.kind === ReflectionKind.class || left.kind === ReflectionKind.objectLiteral) && (right.kind === ReflectionKind.objectLiteral || right.kind === ReflectionKind.class)) {
            const rightConstructor = (right.types as Type[]).find(v => (v.kind === ReflectionKind.methodSignature && v.name === 'new')) as TypeMethodSignature | undefined;
            if (left.kind === ReflectionKind.class && rightConstructor) {
                //if rightConstructor is set then its maybe something like:
                // `class {} extends {new (...args: []) => infer T} ? T : never`
                //check if parameters are compatible
                const leftConstructor = left.types.find(v => (v.kind === ReflectionKind.method && v.name === 'constructor')) as TypeMethod | undefined;
                if (leftConstructor) {
                    if (!isFunctionParameterExtendable(leftConstructor, rightConstructor, extendStack)) {
                        return false;
                    }
                }
                return isExtendable(left, rightConstructor.return, extendStack);
            }
            //structural check: every member of `right` must exist on `left`
            //and be assignable
            for (const member of right.types) {
                //todo: call signature
                //todo: index signatures
                if (isMember(member)) {
                    if (member.name === 'constructor') continue;
                    const leftMember = (left.types as Type[]).find(v => isMember(v) && v.name === member.name);
                    if (!leftMember) return false;
                    if (!isExtendable(leftMember, member, extendStack)) {
                        return false;
                    }
                }
            }
            //two empty classes fall back to a nominal prototype-chain check
            if (left.kind === ReflectionKind.class && right.kind === ReflectionKind.class && left.types.length === 0 && right.types.length === 0) {
                //class User extends Base {}
                //User extends Base = true
                if (left.classType === right.classType) return true;
                return isPrototypeOfBase(left.classType, right.classType);
            }
            return true;
        }
        if (left.kind === ReflectionKind.array && right.kind === ReflectionKind.array) {
            return isExtendable(left.type, right.type, extendStack);
        }
        //tuple extends array: compare the union of all tuple member types
        if (left.kind === ReflectionKind.tuple && right.kind === ReflectionKind.array) {
            const tupleUnion: TypeUnion = { kind: ReflectionKind.union, types: [] };
            for (const member of left.types) {
                if (member.optional && isTypeIncluded(tupleUnion.types, { kind: ReflectionKind.undefined })) tupleUnion.types.push({ kind: ReflectionKind.undefined });
                const type = member.type.kind === ReflectionKind.rest ? member.type.type : member.type;
                if (isTypeIncluded(tupleUnion.types, type)) tupleUnion.types.push(type);
            }
            return isExtendable(tupleUnion, right, extendStack);
        }
        //array extends tuple: only possible with rest/optional members
        if (left.kind === ReflectionKind.array && right.kind === ReflectionKind.tuple) {
            const hasRest = right.types.some(v => v.type.kind === ReflectionKind.rest);
            if (!hasRest && (left.type.kind !== ReflectionKind.union || !isOptional(left.type))) return false;
            for (const member of right.types) {
                let type = member.type.kind === ReflectionKind.rest ? member.type.type : member.type;
                if (member.optional) type = flatten({ kind: ReflectionKind.union, types: [{ kind: ReflectionKind.undefined }, type] });
                if (!isExtendable(left.type, type, extendStack)) return false;
            }
            return true;
        }
        //tuple extends tuple: position-wise check via indexAccess
        if (left.kind === ReflectionKind.tuple && right.kind === ReflectionKind.tuple) {
            for (let i = 0; i < right.types.length; i++) {
                const rightType = indexAccess(right, { kind: ReflectionKind.literal, literal: i });
                const leftType = indexAccess(left, { kind: ReflectionKind.literal, literal: i });
                if (rightType.kind === ReflectionKind.infer || leftType.kind === ReflectionKind.infer) continue;
                const valid = isExtendable(leftType, rightType, extendStack);
                if (!valid) return false;
            }
            inferFromTuple(left, right);
            return true;
        }
        //a union on the left requires every member to extend right;
        //a union on the right requires at least one member to match
        if (left && left.kind === ReflectionKind.union) return left.types.every(v => isExtendable(v, rightValue, extendStack));
        if (right.kind === ReflectionKind.union) return right.types.some(v => isExtendable(leftValue, v, extendStack));
        return false;
    } finally {
        //NOTE(review): pop() is disabled here, so pushed pairs persist for the
        //whole top-level check — presumably intentional for shared sub-checks;
        //confirm before re-enabling.
        // extendStack.pop();
    }
}
/**
 * Converts a function's parameter list into an equivalent tuple type, so that
 * parameter assignability can reuse the tuple comparison algorithm.
 * A parameter with a default value counts as optional for the caller.
 */
export function parametersToTuple(parameters: TypeParameter[]): TypeTuple {
    const tuple = {
        kind: ReflectionKind.tuple,
        types: []
    } as TypeTuple;
    for (const parameter of parameters) {
        const optional = (parameter.optional || parameter.default !== undefined) ? true : undefined;
        tuple.types.push({
            kind: ReflectionKind.tupleMember,
            parent: tuple,
            name: parameter.name,
            optional,
            type: parameter.type,
        });
    }
    return tuple;
}
/**
 * Checks parameter-list compatibility by converting both lists to tuples —
 * the tuple comparison is the same algorithm.
 *
 * Note the swapped operands: parameter assignability is inverted relative to
 * tuple assignability:
 *   tuple:    [a: string] extends [a: string, b: string]  => true
 *   function: (a: string) extends (a: string, b: string)  => false
 */
function isFunctionParameterExtendable(left: { parameters: TypeParameter[] }, right: { parameters: TypeParameter[] }, extendStack: StackEntry[]): boolean {
    const leftTuple = parametersToTuple(left.parameters);
    const rightTuple = parametersToTuple(right.parameters);
    if (!isExtendable(rightTuple, leftTuple, extendStack)) return false;
    //on success, resolve any `infer` placeholders in the right-hand parameters
    inferFromTuple(leftTuple, rightTuple);
    return true;
}
/**
 * Checks whether `left` (a string literal or template literal) matches the
 * template literal `right`, resolving `infer` spans in `right` on the way.
 *
 * `left` is walked via a linked list of read cursors (one per span); literal
 * spans of `right` act as anchors, and the wildcard spans between anchors are
 * collected in `matchQueue` and resolved in handleQueue().
 */
export function extendTemplateLiteral(left: TypeLiteral | TypeTemplateLiteral, right: TypeTemplateLiteral): boolean {
    //read cursor into the left-hand side: one node per (non-infer) span
    interface ReadQueueItem {
        type: TypeString | TypeNumber | TypeLiteral | TypeAny;
        position: number;
        next?: ReadQueueItem;
    }
    //wildcard spans of `right` collected until the next literal anchor
    let matchQueue: (TypeInfer | TypeNumber | TypeString | TypeAny)[] = [];
    let current = (left.kind === ReflectionKind.literal ? { type: left as (TypeLiteral & { literal: string }), position: 0 } : {
        type: left.types[0],
        position: 0
    }) as ReadQueueItem | undefined;
    if (current && left.kind === ReflectionKind.templateLiteral) {
        //build the linked list of the remaining left spans.
        //fix: append to the tail instead of overwriting `current.next` on
        //every iteration, which previously dropped all but the last span
        //whenever `left` had three or more non-infer spans.
        let tail = current;
        for (let i = 1; i < left.types.length; i++) {
            const t = left.types[i];
            if (t.kind === ReflectionKind.infer) continue;
            tail.next = { type: t, position: 0 };
            tail = tail.next;
        }
    }
    //finds the next occurrence of `delimiter` in the remaining left spans,
    //returning a cursor positioned at it (or undefined when not found)
    function search(delimiter: string): ReadQueueItem | undefined {
        let result = current;
        while (result) {
            if (result.type.kind === ReflectionKind.literal) {
                const value = result.type.literal as string;
                if (value !== '') {
                    const position = value.indexOf(delimiter, result.position);
                    if (position !== -1) {
                        return { ...result, position: position };
                    }
                }
                //go next
            }
            result = result.next;
        }
        //not found
        return;
    }
    //consumes left content for the queued wildcard spans, up to `end`
    //(exclusive) or to the end of the left side; resolves `infer` spans
    function handleQueue(end?: ReadQueueItem): boolean {
        if (matchQueue.length === 0) return true;
        const last = matchQueue[matchQueue.length - 1];
        for (const item of matchQueue) {
            const isLast = item === last;
            if (!isLast) {
                //pick only one character
                while (current) {
                    if (current.type.kind === ReflectionKind.literal) {
                        const value = current.type.literal as string;
                        if (current.position === value.length) {
                            //end, go next
                            current = current.next;
                            continue;
                        }
                        const char = value[current.position++];
                        if (item.kind === ReflectionKind.number) {
                            if (value === '' || isNaN(+char)) return false;
                        } else if (item.kind === ReflectionKind.infer) {
                            item.set({ kind: ReflectionKind.literal, literal: char });
                        }
                    } else if (current.type.kind === ReflectionKind.string) {
                        if (item.kind === ReflectionKind.number) {
                            return false;
                        } else if (item.kind === ReflectionKind.infer) {
                            item.set(current.type);
                        }
                    } else if (current.type.kind === ReflectionKind.any) {
                        if (item.kind === ReflectionKind.infer) {
                            item.set(current.type);
                        }
                    } else if (current.type.kind === ReflectionKind.number) {
                        if (item.kind === ReflectionKind.infer) {
                            item.set(current.type);
                        }
                    }
                    break;
                }
            } else {
                //the last wildcard greedily consumes everything up to `end`
                if (item.kind === ReflectionKind.any || item.kind === ReflectionKind.string || item.kind === ReflectionKind.infer) {
                    const result: TypeTemplateLiteral = { kind: ReflectionKind.templateLiteral, types: [] };
                    while (current) {
                        if (current.type.kind === ReflectionKind.literal) {
                            const value = current.type.literal as string;
                            if (current.position === value.length) {
                                //end, go next
                                current = current.next;
                                continue;
                            }
                            const v = value.slice(current.position, end ? end.position : undefined);
                            result.types.push({ kind: ReflectionKind.literal, literal: v });
                        } else {
                            result.types.push(current.type);
                            // if (item.kind === ReflectionKind.infer) {
                            //     item.set(current.type);
                            // }
                        }
                        if (end && current.type === end.type) break;
                        current = current.next;
                    }
                    if (item.kind === ReflectionKind.infer) {
                        //single-span results collapse to the span itself
                        if (result.types.length === 1) {
                            item.set(result.types[0]);
                        } else {
                            item.set(result);
                        }
                    }
                } else if (item.kind === ReflectionKind.number) {
                    //read until no number
                    let value = '';
                    while (current) {
                        if (current.type.kind === ReflectionKind.literal) {
                            const v = (current.type.literal as string).slice(current.position, end ? end.position : undefined);
                            value += v;
                        } else if (current.type.kind === ReflectionKind.number || current.type.kind === ReflectionKind.any) {
                            //number is fine
                        } else {
                            //string is not fine as it can contain characters not compatible to number
                            return false;
                        }
                        current = current.next;
                    }
                    if (value === '' || isNaN(+value)) return false;
                }
            }
        }
        matchQueue = [];
        return true;
    }
    //walk the right-hand spans: literals anchor the match, wildcards queue up
    for (const span of right.types) {
        if (span.kind === ReflectionKind.literal) {
            const position = search(span.literal as string);
            if (!position) return false;
            if (!handleQueue(position)) return false;
            //continue reading after the matched literal
            current = { ...position, position: position.position + (span.literal as string).length };
        } else if (span.kind === ReflectionKind.infer) {
            matchQueue.push(span);
        } else if (span.kind === ReflectionKind.string) {
            matchQueue.push(span);
        } else if (span.kind === ReflectionKind.number) {
            matchQueue.push(span);
        }
    }
    //flush any trailing wildcards against the rest of the left side
    if (!handleQueue()) return false;
    return true;
}
/**
 * After a successful tuple comparison, resolves `infer` members of `right`
 * from the corresponding members of `left` (mutates via .set()).
 * Called from the tuple-extends-tuple branch of isExtendable and from
 * isFunctionParameterExtendable.
 */
function inferFromTuple(left: TypeTuple, right: TypeTuple) {
    //when all types match, we find `infer`
    for (let i = 0; i < right.types.length; i++) {
        const rightType = right.types[i];
        // only members that are `infer` (directly or behind `...rest`) need work
        if (rightType.type.kind === ReflectionKind.infer || (rightType.type.kind === ReflectionKind.rest && rightType.type.type.kind === ReflectionKind.infer)) {
            // collect the left members that correspond to this position
            const inferred: TypeTuple = { kind: ReflectionKind.tuple, types: [] };
            let restAdded = false;
            for (let j = 0; j < left.types.length; j++) {
                const leftType = left.types[j];
                if (leftType.type.kind === ReflectionKind.rest) {
                    addType(inferred, leftType);
                    restAdded = true; //when a rest element is added, all subsequent types will be added as well
                } else if (restAdded || j >= i) {
                    addType(inferred, leftType);
                }
            }
            // collapse: one member -> that member; none -> never; many -> tuple
            let inferredType: Type = inferred.types.length === 1 ? inferred.types[0] : inferred.types.length === 0 ? { kind: ReflectionKind.never } : inferred;
            if (inferredType.kind === ReflectionKind.tupleMember) inferredType = inferredType.type;
            // a bare rest collapses to an array of its element type
            if (inferredType.kind === ReflectionKind.rest) inferredType = { kind: ReflectionKind.array, type: inferredType.type };
            if (rightType.type.kind === ReflectionKind.infer) {
                rightType.type.set(inferredType);
            } else if (rightType.type.kind === ReflectionKind.rest && rightType.type.type.kind === ReflectionKind.infer) {
                rightType.type.type.set(inferredType);
            }
        }
    }
}
import { ScatterPlotChart } from "../scatterCharts/ScatterPlotChart"
import {
SampleColumnSlugs,
SynthesizeFruitTable,
SynthesizeFruitTableWithNonPositives,
SynthesizeGDPTable,
} from "../../coreTable/OwidTableSynthesizers"
import { ScatterPlotManager } from "./ScatterPlotChartConstants"
import {
EntitySelectionMode,
ScaleType,
ScatterPointLabelStrategy,
} from "../core/GrapherConstants"
import { OwidTable } from "../../coreTable/OwidTable"
import { ErrorValueTypes } from "../../coreTable/ErrorValues"
import { ColumnTypeNames } from "../../coreTable/CoreColumnDef"
import { ContinentColors } from "../color/ColorConstants"
import { OwidTableSlugs } from "../../coreTable/OwidTableConstants"
import { Color } from "../../coreTable/CoreTableConstants"
import { makeOriginalTimeSlugFromColumnSlug } from "../../coreTable/OwidTableUtil"
import { uniq, uniqBy } from "../../clientUtils/Util"
it("can create a new chart", () => {
    // A synthetic GDP table with default options yields two entity series.
    const manager: ScatterPlotManager = {
        table: SynthesizeGDPTable(),
    }
    const scatter = new ScatterPlotChart({ manager })
    expect(scatter.failMessage).toBeFalsy()
    expect(scatter.getSeriesNamesToShow().size).toEqual(2)
    expect(scatter.series.length).toEqual(2)
    expect(scatter.allPoints.length).toBeGreaterThan(0)
})
it("shows error when X or Y columns are missing", () => {
    // A table with only identity columns has no x/y values to plot.
    const manager: ScatterPlotManager = {
        table: new OwidTable([
            ["entityId", "entityName", "entityCode", "year"],
            [1, "World", undefined, 2020],
        ]),
    }
    const scatter = new ScatterPlotChart({ manager })
    expect(scatter.failMessage).toBeTruthy()
})
it("doesn't show 'No data' bin when there is no color column", () => {
    // Without a color dimension there is nothing to bin as "No data".
    const scatter = new ScatterPlotChart({
        manager: {
            table: SynthesizeGDPTable(),
            colorColumnSlug: undefined,
        },
    })
    expect(scatter.failMessage).toBeFalsy()
    expect(scatter.hasNoDataBin).toBeFalsy()
})
it("can remove points outside domain", () => {
    const manager: ScatterPlotManager = {
        table: SynthesizeFruitTable(undefined, 2),
    }
    const scatter = new ScatterPlotChart({ manager })
    // Capture the point count before constraining the x-domain.
    const initialCount = scatter.allPoints.length
    // Constraining the domain with removePointsOutsideDomain should drop
    // some — but not all — of the points.
    manager.xAxisConfig = { removePointsOutsideDomain: true, max: 1100 }
    const filteredCount = scatter.allPoints.length
    expect(filteredCount).toBeGreaterThan(0)
    expect(filteredCount).toBeLessThan(initialCount)
})
it("can filter points with negative values when using a log scale", () => {
    const table = SynthesizeFruitTableWithNonPositives(
        {
            entityCount: 2,
            timeRange: [1900, 2000],
        },
        20,
        1
    )
    const manager: ScatterPlotManager = {
        table,
        yColumnSlug: SampleColumnSlugs.Fruit,
        xColumnSlug: SampleColumnSlugs.Vegetables,
        selection: table.availableEntityNames,
        yAxisConfig: {},
        xAxisConfig: {},
    }
    // With linear axes every point is kept: 2 entities × 100 years.
    const linearChart = new ScatterPlotChart({ manager })
    expect(linearChart.series.length).toEqual(2)
    expect(linearChart.allPoints.length).toEqual(200)
    // Log scales cannot display non-positive values, so those points
    // are dropped and the axis domains start above zero.
    const logChart = new ScatterPlotChart({
        manager: {
            ...manager,
            yAxisConfig: { scaleType: ScaleType.log },
            xAxisConfig: { scaleType: ScaleType.log },
        },
    })
    expect(logChart.dualAxis.horizontalAxis.domain[0]).toBeGreaterThan(0)
    expect(logChart.dualAxis.verticalAxis.domain[0]).toBeGreaterThan(0)
    expect(logChart.series.length).toEqual(2)
    expect(logChart.allPoints.length).toEqual(180)
})
describe("interpolation defaults", () => {
    // Single entity with color/size values only in the middle row (year 1000);
    // the surrounding rows test how far interpolation reaches.
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            [1, "UK", "", -1000, 1, 1, null, null],
            [1, "UK", "", 1000, 1, 1, "Europe", 100],
            [1, "UK", "", 2020, 1, 1, null, null],
        ],
        [
            { slug: "x", type: ColumnTypeNames.Numeric },
            { slug: "y", type: ColumnTypeNames.Numeric },
            { slug: "color", type: ColumnTypeNames.String },
            {
                // size declares a finite tolerance on purpose — the test below
                // asserts the scatter chart overrides it with infinity
                slug: "size",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 1 },
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    it("color defaults to infinity tolerance if none specified", () => {
        // "Europe" (year 1000) must be carried to rows 3020 and 1020 years away
        expect(
            chart.transformedTable.get("color").valuesIncludingErrorValues
        ).toEqual(["Europe", "Europe", "Europe"])
    })
    it("size defaults to infinity tolerance regardless if one specified", () => {
        // the declared tolerance of 1 is ignored; 100 fills all three rows
        expect(
            chart.transformedTable.get("size").valuesIncludingErrorValues
        ).toEqual([100, 100, 100])
    })
})
describe("basic scatterplot", () => {
    // UK has complete x/y only in 2000; 2001-2003 have gaps in x and/or y.
    // Color/size exist only in 2001 and must be pulled into 2000 via the
    // tolerance-1 interpolation. USA has no color/size at all.
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            [1, "UK", "", 2000, 1, 1, null, null],
            [1, "UK", "", 2001, null, 1, "Europe", 100],
            [1, "UK", "", 2002, 1, null, null, null],
            [1, "UK", "", 2003, null, null, null, null],
            [2, "USA", "", 2000, 1, 1, null, null],
        ],
        [
            { slug: "x", type: ColumnTypeNames.Numeric },
            { slug: "y", type: ColumnTypeNames.Numeric },
            {
                slug: "color",
                type: ColumnTypeNames.String,
                display: { tolerance: 1 },
            },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    it("removes error values from X and Y", () => {
        // only the two rows with both x and y present (year 2000) survive
        expect(chart.transformedTable.numRows).toEqual(2)
        expect(chart.transformedTable.timeColumn.uniqValues).toEqual([2000])
    })
    it("interpolates color & size columns before removing rows", () => {
        const ukTable = chart.transformedTable.where({ entityName: "UK" })
        expect(ukTable.get("color").valuesIncludingErrorValues).toEqual([
            "Europe",
        ])
        expect(ukTable.get("size").valuesIncludingErrorValues).toEqual([100])
    })
    it("color & size interpolation doesn't leak", () => {
        // USA never had color/size; UK's values must not bleed across entities
        const usTable = chart.transformedTable.where({ entityName: "USA" })
        expect(usTable.get("color").valuesIncludingErrorValues).toEqual([
            ErrorValueTypes.NoValueWithinTolerance,
        ])
        expect(usTable.get("size").valuesIncludingErrorValues).toEqual([
            ErrorValueTypes.NoValueWithinTolerance,
        ])
    })
    it("shows 'No data' bin", () => {
        // USA has no color value, so a "No data" legend bin is needed
        expect(chart.hasNoDataBin).toEqual(true)
    })
    it("plots correct series", () => {
        expect(chart.series).toEqual([
            {
                color: ContinentColors.Africa, // First "continents" color
                isScaleColor: true,
                label: "UK",
                points: [
                    {
                        color: "Europe",
                        entityName: "UK",
                        label: "2000",
                        size: 100,
                        time: {
                            x: 2000,
                            y: 2000,
                        },
                        timeValue: 2000,
                        x: 1,
                        y: 1,
                    },
                ],
                seriesName: "UK",
                size: 100,
            },
            {
                color: chart.defaultNoDataColor,
                isScaleColor: true,
                label: "USA",
                points: [
                    {
                        color: undefined,
                        entityName: "USA",
                        label: "2000",
                        size: 0,
                        time: {
                            x: 2000,
                            y: 2000,
                        },
                        timeValue: 2000,
                        x: 1,
                        y: 1,
                    },
                ],
                seriesName: "USA",
                size: 0,
            },
        ])
    })
})
describe("label point strategies", () => {
    // One entity, one year, x=1 and y=2 — each strategy therefore has a
    // single unambiguous expected label.
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            [1, "UK", "", 2000, 1, 2, null, null],
        ],
        [
            { slug: "x", type: ColumnTypeNames.Numeric },
            { slug: "y", type: ColumnTypeNames.Numeric },
            {
                slug: "color",
                type: ColumnTypeNames.String,
            },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
    }
    // Builds a chart that labels its points with the given strategy.
    const chartWithStrategy = (
        scatterPointLabelStrategy: ScatterPointLabelStrategy
    ) =>
        new ScatterPlotChart({
            manager: { ...manager, scatterPointLabelStrategy },
        })
    it("year", () => {
        const chart = chartWithStrategy(ScatterPointLabelStrategy.year)
        expect(chart.allPoints[0].label).toEqual("2000")
    })
    it("y", () => {
        const chart = chartWithStrategy(ScatterPointLabelStrategy.y)
        expect(chart.allPoints[0].label).toEqual("2")
    })
    it("x", () => {
        const chart = chartWithStrategy(ScatterPointLabelStrategy.x)
        expect(chart.allPoints[0].label).toEqual("1")
    })
})
it("assigns entity colors to series, overriding colorScale color", () => {
    // The entityColor column ("#ccc") must win over the color scale's
    // continent color for "Europe".
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
                OwidTableSlugs.entityColor,
            ],
            [1, "UK", "", 2000, 1, 2, "Europe", null, "#ccc"],
        ],
        [
            { slug: "x", type: ColumnTypeNames.Numeric },
            { slug: "y", type: ColumnTypeNames.Numeric },
            {
                slug: "color",
                type: ColumnTypeNames.String,
            },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    const scatter = new ScatterPlotChart({
        manager: {
            xColumnSlug: "x",
            yColumnSlug: "y",
            colorColumnSlug: "color",
            sizeColumnSlug: "size",
            table,
        },
    })
    expect(scatter.series[0].color).toEqual("#ccc")
})
describe("entity exclusion", () => {
    // Same fixture as "basic scatterplot": UK gets a color via interpolation,
    // USA has none — matchingEntitiesOnly should then drop USA entirely.
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            [1, "UK", "", 2000, 1, 1, null, null],
            [1, "UK", "", 2001, null, 1, "Europe", 100],
            [1, "UK", "", 2002, 1, null, null, null],
            [1, "UK", "", 2003, null, null, null, null],
            [2, "USA", "", 2000, 1, 1, null, null],
        ],
        [
            { slug: "x", type: ColumnTypeNames.Numeric },
            { slug: "y", type: ColumnTypeNames.Numeric },
            {
                slug: "color",
                type: ColumnTypeNames.String,
                display: { tolerance: 1 },
            },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        matchingEntitiesOnly: true,
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    it("excludes entities without color when matchingEntitiesOnly is enabled", () => {
        expect(chart.allPoints.length).toEqual(1)
        expect(chart.allPoints[0].entityName).toEqual("UK")
    })
    it("doesn't show No data bin", () => {
        // with colorless entities excluded, nothing is left to bin as No data
        expect(chart.hasNoDataBin).toEqual(false)
    })
})
// One entity per continent; verifies the continent color mapping, the legend
// contents, and which legend entries are "active" after the timeline filter.
describe("colors & legend", () => {
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            [1, "Germany", "", 2001, 1, 1, "Europe", null],
            [2, "Canada", "", 2000, 1, 1, "North America", null],
            [3, "China", "", 2000, 1, null, "Asia", null],
            [4, "Australia", "", 2000, 1, 1, "Oceania", null],
            [5, "Antarctica", "", 2000, null, null, "Antarctica", null],
            [6, "Chile", "", 2000, 1, 1, "South America", null],
            [7, "Nigeria", "", 2000, 1, 1, "Africa", null],
        ],
        [
            { slug: "x", type: ColumnTypeNames.Numeric },
            { slug: "y", type: ColumnTypeNames.Numeric },
            {
                slug: "color",
                type: ColumnTypeNames.String,
                display: { tolerance: 1 },
            },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    // Simulates the author-timeline/active-chart filter dropping China, so
    // "Asia" exists in the full table but has no points in the filtered one.
    const tableWithoutChina = table.columnFilter(
        "entityName",
        (name) => name !== "China",
        "filter out China"
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
        tableAfterAuthorTimelineAndActiveChartTransformAndPopulationFilter:
            tableWithoutChina,
    }
    const chart = new ScatterPlotChart({ manager })
    it("assigns correct continent colors", () => {
        // Every series color is the same as the point color
        chart.series.forEach((series) => {
            const seriesNameToContinent: { [key: string]: string } = {
                Germany: "Europe",
                Canada: "North America",
                China: "Asia",
                Australia: "Oceania",
                Antarctica: "Antarctica",
                Chile: "South America",
                Nigeria: "Africa",
            }
            const continentColors: { [key: string]: Color } = {
                ...ContinentColors,
            }
            expect(series.color).toEqual(
                continentColors[seriesNameToContinent[series.seriesName]]
            )
            // The color scale itself must also resolve every continent name
            // to its canonical color.
            for (const seriesName in seriesNameToContinent) {
                const continentName = seriesNameToContinent[seriesName]
                const continentColor = continentColors[continentName]
                expect(chart.colorScale.getColor(continentName)).toEqual(
                    continentColor
                )
            }
        })
    })
    it("legend contains every continent for which there is data (before timeline filter)", () => {
        expect(chart.legendItems.map((item) => item.label).sort()).toEqual([
            "Africa",
            "Antarctica",
            "Europe",
            "North America",
            "Oceania",
            "South America",
        ])
    })
    it("legend items faint if without points for current timeline selection", () => {
        expect(chart.activeColors.sort()).toEqual(
            [
                ContinentColors.Africa,
                ContinentColors.Europe,
                ContinentColors["North America"],
                ContinentColors.Oceania,
                ContinentColors["South America"],
            ].sort()
        )
    })
})
// Exercises per-series transforms: time sorting, endpoints-only mode,
// hideLinesOutsideTolerance, and relative (average-annual-change) mode.
describe("series transformations", () => {
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            // UK rows are deliberately out of chronological order and include
            // rows with a missing x or y value.
            [1, "UK", "", 2001, 1, 1, null, null],
            [1, "UK", "", 2004, 2, 1, null, null],
            [1, "UK", "", 2002, null, 1, null, null],
            [1, "UK", "", 2000, 1, null, null, null],
            [1, "UK", "", 2003, 2, 1, null, null],
            [2, "Germany", "", 2000, 1, 1, null, null],
            [2, "Germany", "", 2003, 2, 2, null, null],
            // USA starts at 0/0 — relevant for the relative-mode test below.
            [3, "USA", "", 2001, 0, 0, null, null],
            [3, "USA", "", 2002, 1, 1, null, null],
            [3, "USA", "", 2003, 2, 2, null, null],
        ],
        [
            {
                slug: "x",
                type: ColumnTypeNames.Numeric,
            },
            {
                slug: "y",
                type: ColumnTypeNames.Numeric,
            },
            { slug: "color", type: ColumnTypeNames.String },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 1 },
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    it("sorts points by time", () => {
        const ukSeries = chart.series.find((s) => s.seriesName === "UK")!
        expect(ukSeries.points.map((p) => p.timeValue)).toEqual([
            2001, 2003, 2004,
        ])
    })
    it("endpointsOnly drops trailing and in-between points", () => {
        const chart = new ScatterPlotChart({
            manager: { ...manager, compareEndPointsOnly: true },
        })
        const ukSeries = chart.series.find((s) => s.seriesName === "UK")!
        expect(ukSeries.points.map((p) => p.timeValue)).toEqual([2001, 2004])
    })
    it("hides entities without full time span", () => {
        const chart = new ScatterPlotChart({
            manager: {
                ...manager,
                hideLinesOutsideTolerance: true,
                startTime: 2000,
                endTime: 2003,
            },
        })
        // Because of the assumption that the timeline filter is applied,
        // only Germany can be visible in this case.
        expect(chart.series.map((s) => s.seriesName)).toEqual(["Germany"])
    })
    it("calculates average annual change", () => {
        const chart = new ScatterPlotChart({
            manager: {
                ...manager,
                isRelativeMode: true,
            },
        })
        const uk = chart.series.find((s) => s.seriesName === "UK")!.points[0]
        const usa = chart.series.find((s) => s.seriesName === "USA")!.points[0]
        const germany = chart.series.find((s) => s.seriesName === "Germany")!
            .points[0]
        expect(uk.x.toFixed(1)).toEqual("26.0")
        expect(uk.y.toFixed(1)).toEqual("0.0")
        // The initial USA point is dropped to avoid an Infinity result
        expect(usa.x.toFixed(1)).toEqual("100.0")
        expect(usa.y.toFixed(1)).toEqual("100.0")
        expect(germany.x.toFixed(1)).toEqual("26.0")
        expect(germany.y.toFixed(1)).toEqual("26.0")
    })
})
// Relative mode from sparse data: interpolation via tolerance, duplicate-row
// collapsing, and the fact that relative mode overrides log axes and
// compareEndPointsOnly.
describe("average annual change", () => {
    const table = new OwidTable(
        [
            ["entityName", "year", "x", "y", "color", "size"],
            ["UK", 2000, 1, 1, null, null],
            ["UK", 2001, null, 2, null, null],
            // Using a 0 end value for Y to make sure we don't naively
            // ignore all zero values, instead of start-only zeroes.
            ["UK", 2002, null, 0, null, null],
            ["UK", 2004, 16, null, null, null],
            // intentionally creating two partial rows for USA that after
            // interpolation turn into one duplicated row
            ["USA", 2000, 1, null, null, null],
            ["USA", 2001, null, 1, null, null],
        ],
        [
            {
                slug: "x",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 3 },
            },
            {
                slug: "y",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 3 },
            },
            { slug: "color", type: ColumnTypeNames.String },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 1 },
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        isRelativeMode: true,
        // Setting log axes to make sure they're ignored in relative mode
        yAxisConfig: { scaleType: ScaleType.log },
        xAxisConfig: { scaleType: ScaleType.log },
        // intentionally setting compareEndPointsOnly to make sure it's
        // ignored in relative mode
        compareEndPointsOnly: true,
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    it("drops series with a single point", () => {
        // USA's two partial rows collapse into a single point (see fixture
        // comment above), so only the UK series survives.
        expect(chart.series.length).toEqual(1)
    })
    it("calculates average annual change based on originalTime", () => {
        const point = chart.series[0].points[0]
        expect(point.x).toEqual(100)
        // Math.abs so the assertion accepts either 0 or -0.
        expect(Math.abs(point.y)).toEqual(0)
    })
    it("formats axes with %", () => {
        expect(chart.dualAxis.verticalAxis.formatTick(0)).toEqual("+0%")
        expect(chart.dualAxis.horizontalAxis.formatTick(0)).toEqual("+0%")
    })
    it("ignores config and sets linear axes", () => {
        expect(chart.dualAxis.horizontalAxis.canChangeScaleType).toBeFalsy()
        expect(chart.dualAxis.verticalAxis.canChangeScaleType).toBeFalsy()
        expect(chart.dualAxis.horizontalAxis.scaleType).toEqual(
            ScaleType.linear
        )
        expect(chart.dualAxis.verticalAxis.scaleType).toEqual(ScaleType.linear)
    })
})
// The x column carries its own "original time" column: the point's time.x
// should come from that column, not from the row's time.
describe("scatter plot with xOverrideTime", () => {
    const xOriginalTimeSlug = makeOriginalTimeSlugFromColumnSlug("x")
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "day",
                "x",
                "y",
                "color",
                "size",
                xOriginalTimeSlug,
            ],
            // Last value per row is the original time of the x value, which
            // differs from the row's own time for UK and USA.
            [1, "UK", "", 2001, 0, 0, null, null, 2000],
            [2, "Germany", "", 2001, 1, 1, null, null, 2001],
            [3, "USA", "", 2001, 2, 2, null, null, 2003],
        ],
        [
            {
                slug: "x",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 1 },
            },
            { slug: "y", type: ColumnTypeNames.Numeric },
            // NOTE(review): the time column in the data above is named "day",
            // yet this def declares a "year" slug — presumably unused here;
            // confirm whether "day" was intended.
            { slug: "year", type: ColumnTypeNames.Year },
            { slug: "color", type: ColumnTypeNames.String },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 1 },
            },
            { slug: xOriginalTimeSlug, type: ColumnTypeNames.Year },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    it("all points have correct times", () => {
        expect(uniq(chart.allPoints.map((p) => p.timeValue))).toEqual([2001])
        expect(uniq(chart.allPoints.map((p) => p.time.y))).toEqual([2001])
        // x times come from the original-time column, not the row time.
        expect(chart.allPoints.map((p) => p.time.x)).toEqual(
            expect.arrayContaining([2000, 2001, 2003])
        )
    })
})
// Verifies how x/y tolerance interpolation matches sparse x and y values into
// rows, and that color/size tolerance is applied before row filtering.
describe("x/y tolerance", () => {
    const table = new OwidTable(
        [
            [
                "entityId",
                "entityName",
                "entityCode",
                "year",
                "x",
                "y",
                "color",
                "size",
            ],
            [1, "UK", "", 2000, 0, null, "Europe", 100],
            [1, "UK", "", 2001, null, null, null, null],
            [1, "UK", "", 2002, null, null, null, null],
            [1, "UK", "", 2003, null, 3, null, null],
            [1, "UK", "", 2004, null, null, null, null],
            [1, "UK", "", 2005, 5, null, null, null],
            [1, "UK", "", 2006, 6, 6, null, null],
            [1, "UK", "", 2007, null, 7, null, null],
            [1, "UK", "", 2008, 8, null, null, null],
            [1, "UK", "", 2009, null, null, null, null],
            [1, "UK", "", 2010, null, null, "Europe", 100],
            // should be removed because it has no X/Y values
            [2, "USA", "", 2020, null, null, "North America", 0],
        ],
        [
            {
                slug: "x",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 3 },
            },
            {
                slug: "y",
                type: ColumnTypeNames.Numeric,
                display: { tolerance: 3 },
            },
            {
                slug: "color",
                type: ColumnTypeNames.String,
                display: { tolerance: 10 },
            },
            {
                slug: "size",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
    }
    const chart = new ScatterPlotChart({ manager })
    const transformedTable = chart.transformedTable
    it("removes rows without X or Y value", () => {
        // Only the years where both x and y can be filled within tolerance 3
        // survive the row filter.
        expect(transformedTable.get("year").values).toEqual([
            2003, 2004, 2005, 2006, 2007, 2008,
        ])
    })
    it("applies tolerance on color & size before filtering rows", () => {
        // Tolerance 10 on color (and the size fill) happens before rows are
        // dropped, so only the UK values remain afterwards.
        expect(
            uniq(transformedTable.get("color").valuesIncludingErrorValues)
        ).toEqual(["Europe"])
        expect(
            uniq(transformedTable.get("size").valuesIncludingErrorValues)
        ).toEqual([100])
    })
    it("matches rows correctly", () => {
        const xTimeSlug = makeOriginalTimeSlugFromColumnSlug("x")
        const yTimeSlug = makeOriginalTimeSlugFromColumnSlug("y")
        const rows = transformedTable.rows
        expect(rows.length).toEqual(6)
        // Dedupe by (x original time, y original time): the six rows contain
        // repeated interpolated pairs, leaving three distinct combinations.
        const uniqRows = uniqBy(
            rows,
            (row) => `${row[xTimeSlug]}-${row[yTimeSlug]}`
        )
        expect(uniqRows).toEqual([
            expect.objectContaining({
                color: "Europe",
                entityName: "UK",
                size: 100,
                x: 5,
                [xTimeSlug]: 2005,
                y: 3,
                [yTimeSlug]: 2003,
                year: 2003,
            }),
            expect.objectContaining({
                color: "Europe",
                entityName: "UK",
                size: 100,
                x: 6,
                [xTimeSlug]: 2006,
                y: 6,
                [yTimeSlug]: 2006,
                year: 2006,
            }),
            expect.objectContaining({
                color: "Europe",
                entityName: "UK",
                size: 100,
                x: 8,
                [xTimeSlug]: 2008,
                y: 7,
                [yTimeSlug]: 2007,
                year: 2008,
            }),
        ])
    })
})
// addCountryMode controls whether the table is filtered down to the current
// entity selection (here: ["UK"]).
describe("addCountryMode", () => {
    const table = new OwidTable(
        [
            ["entityId", "entityName", "entityCode", "year", "x", "y"],
            [1, "UK", "", 2000, 1, 1],
            [2, "USA", "", 2000, 2, 2],
        ],
        [
            {
                slug: "x",
                type: ColumnTypeNames.Numeric,
            },
            {
                slug: "y",
                type: ColumnTypeNames.Numeric,
            },
        ]
    )
    const manager: ScatterPlotManager = {
        xColumnSlug: "x",
        yColumnSlug: "y",
        colorColumnSlug: "color",
        sizeColumnSlug: "size",
        table,
        selection: ["UK"],
    }
    it("doesn't filter any data for MultipleEntities mode", () => {
        const chart = new ScatterPlotChart({
            manager: {
                ...manager,
                addCountryMode: EntitySelectionMode.MultipleEntities,
            },
        })
        expect(chart.transformedTable.numRows).toEqual(2)
    })
    it("filters unselected data for SingleEntity mode", () => {
        const chart = new ScatterPlotChart({
            manager: {
                ...manager,
                addCountryMode: EntitySelectionMode.SingleEntity,
            },
        })
        expect(chart.transformedTable.numRows).toEqual(1)
    })
    it("filters unselected data for Disabled mode", () => {
        const chart = new ScatterPlotChart({
            manager: {
                ...manager,
                addCountryMode: EntitySelectionMode.Disabled,
            },
        })
        expect(chart.transformedTable.numRows).toEqual(1)
    })
})
import React, { ForwardRefExoticComponent, RefAttributes, useRef } from 'react';
import hoistNonReactStatics from 'hoist-non-react-statics';
// @ts-ignore
import { useForkRef } from '@semcore/utils/lib/ref';
// @ts-ignore
import useEnhancedEffect from '@semcore/utils/lib/use/useEnhancedEffect';
// @ts-ignore
import _assignProps from '@semcore/utils/lib/assignProps';
import Component, { PropsWithRenderFnChildren } from './Component';
import register from './register';
import childrenEnhancement, { CHILDREN_COMPONENT } from './enhancement/Children';
import rootEnhancement from './enhancement/Root';
import uncontrolledPropsEnhancement from './enhancement/uncontrolledProps';
import functionDefaultPropsEnhancement from './enhancement/functionDefaultProps';
import staticChildrenEnhancement, { STATIC_COMPONENT } from './enhancement/staticChildren';
import inheritedNameEnhancement, { INHERITED_NAME } from './enhancement/inheritedName';
import hoistPropsEnhancement from './enhancement/hoistProps';
import dataNameEnhancement from './enhancement/dataName';
import enhanceEnhancement from './enhancement/enhance';
import styleEnhancement from './enhancement/style';
import bindHandlerEnhancement from './enhancement/bindHandler';
// Symbols used as hidden keys on generated components and instances so the
// wrapping machinery can stash metadata without colliding with user props.
const CORE_COMPONENT = Symbol('CORE_COMPONENT');
const CORE_INSTANCE = Symbol('CORE_INSTANCE');
const CORE_AS_PROPS = Symbol('CORE_AS_PROPS');
const CORE_INIT = Symbol('CORE_INIT');
const CONTEXT_COMPONENT = Symbol('CONTEXT_COMPONENT');
const CREATE_COMPONENT = Symbol('CREATE_COMPONENT');
const PARENT_COMPONENTS = Symbol('PARENT_COMPONENTS');
/**
 * Returns true only for a plain object (`{}` / `new Object()`) with no own
 * string-keyed or symbol-keyed properties. Arrays, class instances and
 * `Object.create(null)` objects are never "empty" because their prototype is
 * not `Object.prototype`.
 */
function isEmptyObject(obj) {
  return (
    Object.getOwnPropertyNames(obj).length === 0 &&
    // For IE 11, which lacks Object.getOwnPropertySymbols: skip the symbol
    // check instead of short-circuiting the whole `&&` chain to a falsy value
    // (the previous code returned `undefined` for EVERY object on IE 11).
    (typeof Object.getOwnPropertySymbols !== 'function' ||
      Object.getOwnPropertySymbols(obj).length === 0) &&
    Object.getPrototypeOf(obj) === Object.prototype
  );
}
/**
 * Builds an accessor over a list of enhancement descriptors.
 * The returned `getField(key)` collects the truthy `key` fields of every
 * enhancement whose `condition(Component, isFunction)` passes (descriptors
 * without a `condition` always pass), preserving the original order.
 */
function createGetField(enhancements, Component, isFunction) {
  const applies = (enhancement) =>
    enhancement.condition ? enhancement.condition(Component, isFunction) : true;
  return function getField(key) {
    const fields = [];
    for (const enhancement of enhancements) {
      if (!applies(enhancement)) continue;
      const value = enhancement[key];
      if (value) fields.push(value);
    }
    return fields;
  };
}
/**
 * Creates the public forwardRef wrapper around a composed component.
 * Each `wrapperProps` enhancement transforms the incoming props; any `ref`
 * it produces is merged with the outer React ref and a `forwardRef` prop
 * before being handed to the inner component. `static` enhancements are then
 * applied to the wrapper itself.
 */
function createForwardWrapper(Component, wrapperProps, statics, isFunction) {
  // @ts-ignore
  const WrapperComponent = React.forwardRef(function ({ forwardRef = null, ...other }, ref) {
    // WRAPPER PROPS
    const { ref: enhancementRef = null, ...props } = wrapperProps.reduce(
      (acc, enhancement) => enhancement(acc, WrapperComponent, isFunction),
      other,
    );
    // @ts-ignore
    return <Component {...props} forwardRef={useForkRef(enhancementRef, ref, forwardRef)} />;
  });
  // Make the wrapper indistinguishable from the wrapped component for
  // consumers: copy non-React statics, displayName and defaultProps.
  hoistNonReactStatics(WrapperComponent, Component);
  WrapperComponent.displayName = Component.displayName;
  // TODO: defaultProps is only empty
  WrapperComponent.defaultProps = Component.defaultProps;
  // STATIC
  statics.forEach((enhancement) =>
    Object.assign(WrapperComponent, enhancement(WrapperComponent, isFunction)),
  );
  return WrapperComponent;
}
/**
 * Wraps a class component (one extending our base Component) with the
 * enhancement pipeline. Subclasses the origin component so that `init`,
 * `asProps`, `context` and `render` enhancements can hook into the lifecycle,
 * then wraps the result in the public forwardRef wrapper.
 * Note: `WrapperComponent` is referenced inside the class before its `const`
 * declaration below — safe because it is only read at runtime, after
 * `createForwardWrapper` has run.
 */
function wrapClass(OriginComponent, enhancements, Context) {
  const getField = createGetField(enhancements, OriginComponent, false);
  const inits = getField('init');
  const props = getField('asProps');
  const renders = getField('render');
  const statics = getField('static');
  const wrapperProps = getField('wrapperProps');
  const contexts = getField('context');
  class Component extends OriginComponent {
    // Cached result of the asProps pipeline; reset on every render.
    [CORE_AS_PROPS] = null;
    [CORE_INSTANCE] = WrapperComponent;
    // Guards against reading asProps before init enhancements have run.
    [CORE_INIT] = false;
    constructor(props, context) {
      super(props, context);
      // INITS
      inits.forEach((enhancement) => enhancement.call(this, props, WrapperComponent, false));
      this[CORE_INIT] = true;
    }
    get asProps() {
      // TODO if not production
      if (!this[CORE_INIT]) {
        throw new Error('Can not call asProps in constructor');
      }
      if (!this[CORE_AS_PROPS]) {
        // PROPS
        this[CORE_AS_PROPS] = props.reduce(
          (acc, enhancement) => enhancement.call(this, acc, WrapperComponent, false),
          this.props,
        );
      }
      return this[CORE_AS_PROPS];
    }
    setContext() {
      const contextProps = super.setContext ? super.setContext() : {};
      // CONTEXT
      return contexts.reduce(
        (acc, enhancement) => enhancement.call(this, acc, WrapperComponent, false),
        contextProps,
      );
    }
    render() {
      // Invalidate the asProps cache so it is recomputed for this render.
      this[CORE_AS_PROPS] = null;
      // TODO if not production
      if (!super.render) {
        // Russian: "a render method must be defined"
        throw new Error('нужно определить render метод');
      }
      const asProps = this.asProps;
      const ctx = this.setContext();
      // RENDER
      const render = renders.reduce(
        (acc, enhancement) => enhancement.call(this, acc, asProps, WrapperComponent, false),
        super.render(),
      );
      // Only provide context when the component is not static and at least
      // one context enhancement produced something.
      if (!WrapperComponent[STATIC_COMPONENT] && !isEmptyObject(ctx)) {
        return <Context.Provider value={{ ...asProps, ...ctx }}>{render}</Context.Provider>;
      } else {
        return render;
      }
    }
  }
  const WrapperComponent = createForwardWrapper(Component, wrapperProps, statics, false);
  return WrapperComponent;
}
/**
 * Wraps a functional component with the enhancement pipeline.
 * `selfRef.current` plays the role of `this` for the enhancements; `init`
 * enhancements run only on the very first render (tracked via `firstRender`,
 * which is flipped off in a layout-effect-style hook).
 */
function wrapFunction(OriginComponent, enhancements, Context) {
  const getField = createGetField(enhancements, OriginComponent, true);
  const inits = getField('init');
  const props = getField('asProps');
  const renders = getField('render');
  const statics = getField('static');
  const wrapperProps = getField('wrapperProps');
  const contexts = getField('context');
  const Component = React.memo(function FunctionMemoComponent(other) {
    // Mutable bag shared across renders; enhancements use it as `this`.
    const selfRef = useRef({});
    const firstRender = useRef(true);
    useEnhancedEffect(() => {
      firstRender.current = false;
    }, []);
    if (firstRender.current) {
      // INITS
      inits.forEach((enhancement) =>
        enhancement.call(selfRef.current, other, WrapperComponent, true),
      );
    }
    // PROPS
    const asProps = props.reduce(
      (acc, enhancement) => enhancement.call(selfRef.current, acc, WrapperComponent, true),
      other,
    );
    // CONTEXT
    const ctx = contexts.reduce(
      (acc, enhancement) => enhancement.call(selfRef.current, acc, WrapperComponent, true),
      {},
    );
    // RENDER
    const render = renders.reduce(
      (acc, enhancement) => enhancement.call(selfRef.current, acc, asProps, WrapperComponent, true),
      <OriginComponent {...asProps} />,
    );
    // Only provide context when the component is not static and at least one
    // context enhancement produced something.
    if (!WrapperComponent[STATIC_COMPONENT] && !isEmptyObject(ctx)) {
      return <Context.Provider value={{ ...asProps, ...ctx }}>{render}</Context.Provider>;
    } else {
      return render;
    }
  });
  // Copy statics from the origin onto the memoized component.
  Object.assign(Component, OriginComponent);
  const WrapperComponent = createForwardWrapper(Component, wrapperProps, statics, true);
  return WrapperComponent;
}
/**
 * Re-wraps an already-created core component with a new set of enhancements:
 * the original component is rendered through the `Root` enhancement prop, and
 * the actual wrapping is delegated to `wrapFunction`.
 */
function wrapCore(OriginComponent, enhancements, Context) {
  function RenderThroughRoot({ Root }) {
    return <Root render={OriginComponent} />;
  }
  hoistNonReactStatics(RenderThroughRoot, OriginComponent);
  RenderThroughRoot.displayName = OriginComponent.displayName;
  RenderThroughRoot.defaultProps = OriginComponent.defaultProps;
  return wrapFunction(RenderThroughRoot, enhancements, Context);
}
/**
 * Dispatches to the appropriate wrapper depending on what kind of component
 * was passed in: a React class (which must extend our base Component), a
 * plain function component, or an already-wrapped core component.
 * Check order matters: class components are also functions, so the class
 * check has to happen before the `typeof === 'function'` check.
 */
function createComposeComponent(OriginComponent, Context, enhancements): any {
  const isClassComponent =
    React.PureComponent.isPrototypeOf(OriginComponent) ||
    React.Component.isPrototypeOf(OriginComponent);
  if (isClassComponent) {
    if (!(OriginComponent.prototype instanceof Component)) {
      throw new Error('Must inherit from our component');
    }
    return wrapClass(OriginComponent, enhancements, Context);
  }
  if (typeof OriginComponent === 'function') {
    return wrapFunction(OriginComponent, enhancements, Context);
  }
  if (OriginComponent[CORE_COMPONENT]) {
    return wrapCore(OriginComponent, enhancements, Context);
  }
  throw new Error('Must be a React component');
}
/** Props of a generated component plus the standard React ref attribute. @public */
export type PropsAndRef<T, Ctx, UCProps> = PropsWithRenderFnChildren<T, Ctx, UCProps> &
  RefAttributes<unknown>;
/** A forwardRef component whose props follow the render-function-children contract. @public */
export type ForwardRefComponent<T, Ctx, UCProps> = ForwardRefExoticComponent<
  PropsAndRef<T, Ctx, UCProps>
>;
// A child entry is either a [parentProps, childProps] tuple (a nested
// compound component) or a plain props type (a leaf forwardRef component).
type ComponentOrProps<T, Context, UCProps> = T extends [infer ParentProps, infer ChildProps]
  ? ComponentType<ParentProps, ChildProps, Context, UCProps>
  : ForwardRefComponent<T, Context, UCProps>;
/**
 * Public type of a component produced by createComponent: the root component
 * (forwardRef-based unless FNType overrides it) intersected with one static
 * member per child component and the internal CORE markers.
 * @public
 */
export type ComponentType<
  ComponentProps,
  ChildComponentProps = {},
  ContextType = {},
  UCProps = {},
  FNType = null
> = (FNType extends null
  ? ForwardRefComponent<ComponentProps, ContextType, UCProps>
  : FNType & { displayName: string }) &
  {
    [K in keyof ChildComponentProps]: ComponentOrProps<
      ChildComponentProps[K],
      ContextType,
      UCProps
    >;
  } & {
    [CORE_COMPONENT]: boolean;
    [CREATE_COMPONENT]: () => ComponentType<
      ComponentProps,
      ChildComponentProps,
      ContextType,
      UCProps
    >;
  };
// Shape of a class component that declares uncontrolled props; used by
// createComponent to derive the UCProps type parameter.
// NOTE(review): the `Props` type parameter is currently unused.
interface ClassWithUncontrolledProps<Props> {
  uncontrolledProps(): unknown;
}
// Thin public wrapper over the shared assignProps utility; note that the
// argument order is deliberately swapped before delegating.
const assignProps = (p1, p2) => _assignProps(p2, p1);
/**
 * Main factory: composes the origin component with the built-in enhancement
 * pipeline (plus any custom `enhancements`), attaches child components as
 * statics and tags the result with the CORE markers so it can itself be
 * recomposed later via CREATE_COMPONENT. The comments inside the enhancement
 * list document required ordering constraints — do not reorder casually.
 */
function createComponent<ComponentProps, ChildComponentProps = {}, ContextType = {}, FNType = null>(
  OriginComponent,
  childComponents = {},
  options: {
    context?: React.Context<ContextType>;
    parent?: ComponentType<unknown> | ComponentType<unknown>[];
    enhancements?: [any];
  } = {},
): ComponentType<
  ComponentProps extends Component<infer Props> ? Props : ComponentProps,
  ChildComponentProps,
  ContextType,
  ComponentProps extends ClassWithUncontrolledProps<any>
    ? ReturnType<ComponentProps['uncontrolledProps']>
    : { [key: string]: (arg: unknown) => void },
  FNType
> {
  const {
    context = React.createContext<ContextType>({} as ContextType),
    parent = [],
    enhancements = [],
  } = options;
  let parents = Array.isArray(parent) ? parent : [parent];
  if (parents.length) {
    // Flatten the ancestry: prepend each parent's own recorded parents so
    // `parents` holds the whole component family.
    const wholeFamily = parents.reduce((acc, parent) => {
      if (parent[PARENT_COMPONENTS]) {
        acc = [...parent[PARENT_COMPONENTS], ...acc];
      }
      return acc;
    }, parents);
    OriginComponent[PARENT_COMPONENTS] = wholeFamily;
    parents = wholeFamily;
  }
  // NOTE(review): this push mutates the array also stored on
  // OriginComponent[PARENT_COMPONENTS] above — presumably intentional; confirm.
  if (OriginComponent[CORE_COMPONENT]) {
    parents.push(OriginComponent);
  }
  const Component = createComposeComponent(OriginComponent, context, [
    // @ts-ignore
    ...enhancements.map((f) => f(context, parents, createComponent, childComponents)),
    bindHandlerEnhancement(),
    childrenEnhancement(context, parents),
    // root must be under the children
    rootEnhancement(),
    uncontrolledPropsEnhancement(),
    staticChildrenEnhancement(childComponents, createComponent, {
      context,
      parent,
      enhancements,
    }),
    // functionDefaultProps must be under the staticChild
    functionDefaultPropsEnhancement(),
    // inheritedName must be under the staticChild
    inheritedNameEnhancement(),
    // dataName must be under the staticChild
    dataNameEnhancement(),
    // enhanceEnhancement must be under the functionDefaultPropsEnhancement
    enhanceEnhancement(),
    styleEnhancement(childComponents, context),
    // must be the last one so any properties can be raised
    hoistPropsEnhancement(childComponents, context),
  ]);
  Component[CONTEXT_COMPONENT] = context;
  // Allows rebuilding this component later with overridden pieces.
  Component[CREATE_COMPONENT] = function (
    _OriginComponent = OriginComponent,
    _childComponents = childComponents,
    _options = options,
  ) {
    return createComponent(_OriginComponent, _childComponents, _options);
  };
  Component[CORE_COMPONENT] = true;
  return Component;
}
/**
 * Turns a plain functional component into a "core" component: wraps it in
 * React.forwardRef, copies its displayName, seeds a `data-ui-name` default
 * prop (overridable by the component's own defaultProps) and tags it with the
 * CORE_COMPONENT marker. Class components are rejected.
 */
function createBaseComponent<ComponentProps>(OriginComponent): ComponentType<ComponentProps> {
  const isFunctionalComponent =
    typeof OriginComponent === 'function' &&
    !React.PureComponent.isPrototypeOf(OriginComponent) &&
    !React.Component.isPrototypeOf(OriginComponent);
  if (!isFunctionalComponent) {
    throw new Error('createBaseComponent accepts only functional component');
  }
  // Initialized via `null` so the variable stays loosely typed, matching the
  // mutations below (displayName/defaultProps/symbol marker).
  let Component = null;
  Component = React.forwardRef(OriginComponent);
  Component.displayName = OriginComponent.displayName;
  Component.defaultProps = {
    'data-ui-name': OriginComponent.displayName,
    ...OriginComponent.defaultProps,
  };
  Component[CORE_COMPONENT] = true;
  return Component;
}
export * from './Component';
export * from './styled';
export * from './register';
export {
createBaseComponent,
Component,
register,
CREATE_COMPONENT,
CORE_INSTANCE,
CONTEXT_COMPONENT,
CHILDREN_COMPONENT,
INHERITED_NAME,
CORE_COMPONENT,
STATIC_COMPONENT,
assignProps,
};
export default createComponent; | the_stack |
import React, { FunctionComponent, useRef, useState, useCallback } from "react";
import OverlayBox from "Components/Common/OverlayBox";
import FileDropZone from "../../../Add/Pages/AddFiles/FileDropZone";
import {
State,
createId,
DatasetStateUpdaterType,
Distribution,
DistributionSource,
DistributionState,
saveRuntimeStateToStorage,
DistributionCreationMethod,
getDistributionAddCallback
} from "Components/Dataset/Add/DatasetAddCommon";
import AsyncButton from "Components/Common/AsyncButton";
import AddDatasetFromLinkInput from "../../../Add/Pages/AddFiles/AddDatasetFromLinkInput";
import DistributionItem from "Components/Dataset/Add/DistributionItem";
import "./AddNewFilesModal.scss";
import promisifySetState from "helpers/promisifySetState";
/** Props of the AddNewFilesModal component. */
type PropsType = {
    stateData: State;
    datasetStateUpdater: DatasetStateUpdaterType;
    datasetId: string;
    deleteDistributionHandler: (dist: string) => () => Promise<void>;
    editDistributionHandler: (
        distId: string
    ) => (updater: (distribution: Distribution) => Distribution) => void;
    isOpen: boolean;
    // Fix: the parameter was previously written `(boolean) => void`, which
    // declares a parameter *named* `boolean` of type `any`.
    setIsOpen: (isOpen: boolean) => void;
};
// Maps a distribution id to its in-flight deletion promise.
// Fix: this was declared as a call signature `(key: string): Promise<void>`,
// but every use site indexes it (`deletionPromises[distId] = ...`,
// `Object.values(deletionPromises)`), so an index signature is the correct shape.
type PromiseListType = {
    [key: string]: Promise<void>;
};
/**
 * Modal dialog used in the dataset edit flow to add new distributions
 * (uploaded files and/or URLs of APIs / online datasets) to an existing
 * dataset. Newly created distributions stay unconfirmed
 * (isAddConfirmed === false) until the user presses "Finish Adding";
 * closing/cancelling the modal deletes them again.
 */
const AddNewFilesModal: FunctionComponent<PropsType> = (props) => {
    const {
        deleteDistributionHandler,
        setIsOpen,
        datasetStateUpdater,
        datasetId
    } = props;
    const { distributions } = props.stateData;
    const [error, setError] = useState<Error | null>(null);
    const [processingErrorMessage, setProcessingErrorMessage] = useState("");
    // Tracks in-flight deletion promises per distribution id so closeModal
    // can wait for them before deleting the remaining unconfirmed items.
    const deletionPromisesRef = useRef<PromiseListType>({} as PromiseListType);
    // Renders a two-column list of distribution items with edit/delete controls.
    const renderDistList = (dists: Distribution[]) => {
        return (
            <div className="col-xs-12">
                <div className="row">
                    {dists.map((file: Distribution, i) => {
                        // "last row" styling: the final one (odd count) or
                        // two (even count) items.
                        let isLastRow;
                        if (dists.length % 2) {
                            isLastRow = i >= dists.length - 1;
                        } else {
                            isLastRow = i >= dists.length - 2;
                        }
                        const distId = file.id!;
                        // Registers the deletion promise before awaiting it so
                        // closeModal can observe in-flight deletions.
                        const delHandler = async () => {
                            const deletionPromises =
                                deletionPromisesRef.current;
                            try {
                                setError(null);
                                deletionPromises[
                                    distId
                                ] = props.deleteDistributionHandler(distId)();
                                await deletionPromises[distId];
                            } catch (e) {
                                setError(e);
                                throw e;
                            }
                        };
                        return (
                            <div
                                key={i}
                                className={`col-xs-6 dataset-add-files-fileListItem ${
                                    isLastRow ? "last-row" : ""
                                }`}
                            >
                                <DistributionItem
                                    idx={i}
                                    key={i}
                                    className="small"
                                    distribution={file}
                                    onChange={props.editDistributionHandler(
                                        file.id!
                                    )}
                                    onDelete={delHandler}
                                />
                            </div>
                        );
                    })}
                </div>
            </div>
        );
    };
    // New, not-yet-confirmed distributions created from file uploads.
    const uploadedDistributions = distributions.filter(
        (item) =>
            item.isAddConfirmed === false &&
            item.creationSource === DistributionSource.File
    );
    // New, not-yet-confirmed distributions created from a URL (API or dataset).
    const urlDistributions = distributions.filter(
        (item) =>
            item.isAddConfirmed === false &&
            (item.creationSource === DistributionSource.Api ||
                item.creationSource === DistributionSource.DatasetUrl)
    );
    // Distributions still being processed (neither Ready nor Drafting);
    // while any exist, cancel/close is disabled.
    const pendingDistributions = distributions.filter(
        (item) =>
            item._state !== DistributionState.Ready &&
            item._state !== DistributionState.Drafting
    );
    // Anything not Ready (including drafts) blocks "Finish Adding".
    const notReadyDistributions = distributions.filter(
        (item) => item._state !== DistributionState.Ready
    );
    // Cancel path: waits for in-flight deletions, then removes all
    // unconfirmed file and URL distributions before closing.
    const closeModal = useCallback(async () => {
        try {
            setError(null);
            const deletionPromises = deletionPromisesRef.current;
            // --- wait for existing deletion job
            await Promise.all(Object.values(deletionPromises));
            // --- try to delete all existing files
            await Promise.all(
                uploadedDistributions.map((item) =>
                    deleteDistributionHandler(item.id!)()
                )
            );
            // -- try to delete all existing url distributions
            await Promise.all(
                urlDistributions.map((item) =>
                    deleteDistributionHandler(item.id!)()
                )
            );
            setIsOpen(false);
        } catch (e) {
            setError(e);
        }
    }, [
        uploadedDistributions,
        urlDistributions,
        deleteDistributionHandler,
        setIsOpen
    ]);
    // Confirm path: marks every new distribution as confirmed, persists the
    // draft, then closes the modal.
    const onAddFiles = useCallback(async () => {
        try {
            setError(null);
            await promisifySetState(datasetStateUpdater)((state) => {
                const allNewDists = uploadedDistributions.concat(
                    urlDistributions
                );
                return {
                    ...state,
                    distributions: state.distributions.map((dist) => {
                        if (allNewDists.find((item) => item.id === dist.id)) {
                            return { ...dist, isAddConfirmed: true };
                        } else {
                            return dist;
                        }
                    })
                };
            });
            // --- save to draft
            await saveRuntimeStateToStorage(datasetId, datasetStateUpdater);
            setIsOpen(false);
        } catch (e) {
            setError(e);
        }
    }, [
        uploadedDistributions,
        urlDistributions,
        datasetStateUpdater,
        setIsOpen,
        datasetId
    ]);
    // Creates an empty "Untitled" distribution in Drafting state for manual
    // metadata entry; only one draft may be edited at a time.
    // NOTE(review): this callback reads `distributions` but lists only
    // `datasetStateUpdater` as a dependency — the drafting check may see a
    // stale list; consider adding `distributions` to the deps. TODO confirm.
    const manualCreate = useCallback(() => {
        try {
            setError(null);
            if (
                distributions.findIndex(
                    (item) => item._state === DistributionState.Drafting
                ) !== -1
            ) {
                throw new Error(
                    "Please complete the current editing item before create a new one."
                );
            }
            getDistributionAddCallback(datasetStateUpdater)({
                id: createId("dist"),
                creationSource: DistributionSource.File,
                creationMethod: DistributionCreationMethod.Manual,
                title: "Untitled",
                modified: new Date(),
                format: "",
                license: "No License",
                _state: DistributionState.Drafting,
                isAddConfirmed: false,
                isReplacementConfirmed: false,
                useStorageApi: false
            });
        } catch (e) {
            setError(e);
        }
    }, [datasetStateUpdater]);
    return (
        <OverlayBox
            className="add-new-files-modal"
            isOpen={props.isOpen}
            title="Select the new content you want to add or replace"
            onClose={closeModal}
            showCloseButton={pendingDistributions.length ? false : true}
        >
            <div className="content-area">
                <div className="small-heading">New files</div>
                {uploadedDistributions.length ? (
                    <div className="file-items-area">
                        {renderDistList(uploadedDistributions)}
                    </div>
                ) : null}
                <div className="cols-sm-12 file-drop-area">
                    <button
                        className="au-btn au-btn--secondary manual-create-file-button"
                        onClick={() => manualCreate()}
                    >
                        Manually Create File
                    </button>
                    <FileDropZone
                        stateData={props.stateData}
                        datasetId={props.datasetId}
                        datasetStateUpdater={props.datasetStateUpdater}
                        initDistProps={{
                            isAddConfirmed: false,
                            isReplacementConfirmed: false
                        }}
                        onError={(e) => {
                            console.error(e);
                            setError(e);
                        }}
                    />
                </div>
                <div className="small-heading">
                    (and/or) New URL of an API or dataset online
                </div>
                {processingErrorMessage ? (
                    <div className="process-url-error-message au-body au-page-alerts au-page-alerts--warning">
                        <h3>{processingErrorMessage}</h3>
                        <div className="heading">Here’s what you can do:</div>
                        <ul>
                            <li>
                                Double check the URL below is correct and
                                without any typos. If you need to edit the URL,
                                do so below and press ‘Fetch’ again
                            </li>
                            <li>
                                If the URL looks correct, it’s possible we can’t
                                connect to the service or extract any meaningful
                                metadata from it. You may want to try again
                                later
                            </li>
                            <li>
                                If you want to continue using this URL you can,
                                however you’ll need to manually enter the
                                dataset metadata. Use the ‘Manually enter
                                metadata’ button below
                            </li>
                        </ul>
                    </div>
                ) : null}
                <AddDatasetFromLinkInput
                    initDistProps={{
                        isAddConfirmed: false,
                        isReplacementConfirmed: false
                    }}
                    datasetStateUpdater={props.datasetStateUpdater}
                    onProcessingError={(e) => {
                        setProcessingErrorMessage(
                            "" + (e.message ? e.message : e)
                        );
                    }}
                    onClearProcessingError={() => setProcessingErrorMessage("")}
                />
                {urlDistributions.length ? (
                    <div className="url-items-area">
                        {renderDistList(urlDistributions)}
                    </div>
                ) : null}
                {error ? (
                    <div className="au-body au-page-alerts au-page-alerts--error">
                        <div>
                            <span>
                                Magda has encountered an error: {error?.message}
                            </span>
                        </div>
                    </div>
                ) : null}
            </div>
            <div className="bottom-button-area">
                <AsyncButton
                    disabled={notReadyDistributions.length ? true : false}
                    onClick={onAddFiles}
                >
                    Finish Adding
                </AsyncButton>{" "}
                <AsyncButton
                    isSecondary={true}
                    onClick={closeModal}
                    disabled={pendingDistributions.length ? true : false}
                >
                    Cancel
                </AsyncButton>
            </div>
        </OverlayBox>
    );
};
export default AddNewFilesModal;
import { Serializer } from '@edtr-io/plugin'
import { ValueJSON } from 'slate'
/**
 * Converts between the new document format (`NewNode[]`) and the legacy
 * Slate 0.47 `ValueJSON` representation.
 *
 * `deserialize` wraps the new nodes in a Slate value/document pair and maps
 * each node to its old block/inline/text counterpart. `serialize` performs
 * the inverse mapping after normalizing away Slate `leaves` wrappers (see
 * `removeLeaves`).
 *
 * @public
 */
export const serializer: Serializer<NewNode[], ValueJSON> = {
  deserialize(serialized) {
    return {
      object: 'value',
      document: {
        object: 'document',
        // Tolerate null/undefined state coming from older stored documents.
        nodes: (serialized || []).map(deserializeNode),
      },
    } as ValueJSON

    // Dispatches on node shape: elements carry `children`, texts do not.
    function deserializeNode(node: NewNode) {
      if (isNewElement(node)) {
        return deserializeElement(node)
      }
      return deserializeText(node)

      // Maps one new element to the corresponding legacy Slate node type.
      function deserializeElement(element: NewElement): OldElement {
        switch (element.type) {
          case 'p': {
            const oldElement: OldParagraphElement = {
              object: 'block',
              type: 'paragraph',
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'h': {
            const oldElement: OldHeadingElement = {
              object: 'block',
              // The type assertion is necessary for api-extractor
              // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
              type: `@splish-me/h${element.level}` as OldHeadingElement['type'],
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'a': {
            const oldElement: OldLinkElement = {
              object: 'inline',
              type: '@splish-me/a',
              data: {
                href: element.href,
              },
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'math': {
            // Inline and block math map to two distinct legacy node types.
            if (element.inline) {
              const oldElement: OldKatexInlineElement = {
                object: 'inline',
                type: '@splish-me/katex-inline',
                data: {
                  formula: element.src,
                  inline: true,
                },
                isVoid: true,
                nodes: element.children.map(deserializeNode),
              }
              return oldElement
            }
            const oldElement: OldKatexBlockElement = {
              object: 'block',
              type: '@splish-me/katex-block',
              data: {
                formula: element.src,
                inline: false,
              },
              isVoid: true,
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'ordered-list': {
            const oldElement: OldOrderedListElement = {
              object: 'block',
              type: 'ordered-list',
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'unordered-list': {
            const oldElement: OldUnorderedListElement = {
              object: 'block',
              type: 'unordered-list',
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'list-item': {
            const oldElement: OldListItemElement = {
              object: 'block',
              type: 'list-item',
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
          case 'list-item-child': {
            const oldElement: OldListItemChildElement = {
              object: 'block',
              type: 'list-item-child',
              nodes: element.children.map(deserializeNode),
            }
            return oldElement
          }
        }
      }

      // Converts a new text leaf's formatting flags into legacy mark objects.
      function deserializeText(text: NewText): OldText {
        const marks: OldMark[] = []
        if (text.em) {
          marks.push({ object: 'mark', type: '@splish-me/em' })
        }
        if (text.strong) {
          marks.push({ object: 'mark', type: '@splish-me/strong' })
        }
        if (text.code) {
          marks.push({ object: 'mark', type: 'code' })
        }
        if (text.color !== undefined) {
          marks.push({
            object: 'mark',
            type: '@splish-me/color',
            data: { colorIndex: text.color },
          })
        }
        return {
          object: 'text',
          text: text.text,
          marks: marks,
        }
      }
    }
  },
  serialize(deserialized) {
    // Strip Slate 0.4x `leaves` wrappers before mapping to the new format.
    const nodes = removeLeaves(
      deserialized && deserialized.document
        ? (deserialized.document.nodes as OldNode[])
        : []
    )
    if (!nodes) return []
    return nodes.map(serializeNode)

    function serializeNode(node: OldNode): NewNode {
      if (node.object === 'text') {
        return serializeText(node)
      }
      return serializeElement(node)

      // Maps one legacy Slate node to the corresponding new element.
      function serializeElement(element: OldElement): NewElement {
        switch (element.type) {
          case 'paragraph': {
            const newElement: NewParagraphElement = {
              type: 'p',
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case '@splish-me/h1':
          case '@splish-me/h2':
          case '@splish-me/h3':
          case '@splish-me/h4':
          case '@splish-me/h5':
          case '@splish-me/h6': {
            // All six heading variants differ only in the level encoded in
            // the last character of the type name, so handle them in one
            // case instead of six copy-pasted ones.
            const newElement: NewHeadingElement = {
              type: 'h',
              level: Number(
                element.type.slice(-1)
              ) as NewHeadingElement['level'],
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case '@splish-me/a': {
            const newElement: NewLinkElement = {
              type: 'a',
              href: element.data.href,
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case '@splish-me/katex-block': {
            const newElement: NewMathElement = {
              type: 'math',
              src: element.data.formula,
              inline: false,
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case '@splish-me/katex-inline': {
            const newElement: NewMathElement = {
              type: 'math',
              src: element.data.formula,
              inline: true,
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case 'ordered-list': {
            const newElement: NewOrderedListElement = {
              type: 'ordered-list',
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case 'unordered-list': {
            const newElement: NewUnorderedListElement = {
              type: 'unordered-list',
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case 'list-item': {
            const newElement: NewListItemElement = {
              type: 'list-item',
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
          case 'list-item-child': {
            const newElement: NewListItemChildElement = {
              type: 'list-item-child',
              children: element.nodes.map(serializeNode),
            }
            return newElement
          }
        }
      }

      // Converts legacy mark objects back into flat formatting flags.
      function serializeText(text: OldText): NewText {
        const newText: NewText = {
          text: text.text,
        }
        const marks = text.marks || []
        marks.forEach((mark) => {
          switch (mark.type) {
            case '@splish-me/strong':
              newText.strong = true
              return
            case '@splish-me/em':
              newText.em = true
              return
            case '@splish-me/color':
              newText.color = mark.data.colorIndex
              return
            case 'code':
              newText.code = true
              return
          }
        })
        return newText
      }
    }
  },
}
/**
 * Text leaf of the new document format; formatting marks are stored as
 * flat flags on the node rather than as separate mark objects.
 *
 * @public
 */
export interface NewText {
  text: string
  // Renders as inline code when true.
  code?: boolean
  // Index into the editor's color palette.
  color?: number
  em?: boolean
  strong?: boolean
}
/** Paragraph container of the new format. @public */
export interface NewParagraphElement {
  type: 'p'
  children: NewNode[]
}
/** Heading of the new format; the level replaces the six legacy h1–h6 types. @public */
export interface NewHeadingElement {
  type: 'h'
  level: 1 | 2 | 3 | 4 | 5 | 6
  children: NewNode[]
}
/** Hyperlink of the new format. @public */
export interface NewLinkElement {
  type: 'a'
  href: string
  children: NewNode[]
}
/** Math formula of the new format; `inline` selects inline vs. block rendering. @public */
export interface NewMathElement {
  type: 'math'
  src: string
  inline: boolean
  children: NewNode[]
}
/** Ordered list container of the new format. @public */
export interface NewOrderedListElement {
  type: 'ordered-list'
  children: NewNode[]
}
/** Unordered list container of the new format. @public */
export interface NewUnorderedListElement {
  type: 'unordered-list'
  children: NewNode[]
}
/** List item of the new format; wraps list-item-child nodes. @public */
export interface NewListItemElement {
  type: 'list-item'
  children: NewNode[]
}
/** Content wrapper inside a list item in the new format. @public */
export interface NewListItemChildElement {
  type: 'list-item-child'
  children: NewNode[]
}
/** Union of all element kinds of the new format, discriminated by `type`. @public */
export type NewElement =
  | NewParagraphElement
  | NewHeadingElement
  | NewLinkElement
  | NewMathElement
  | NewOrderedListElement
  | NewUnorderedListElement
  | NewListItemElement
  | NewListItemChildElement
/** Any node of the new format: either a text leaf or an element. @public */
export type NewNode = NewText | NewElement
/**
 * Type guard distinguishing element nodes from text leaves: only elements
 * carry a `children` array, text leaves never do.
 */
function isNewElement(node: NewNode): node is NewElement {
  return 'children' in node
}
/** Legacy Slate mark for bold text. @public */
export interface OldStrongMark {
  object: 'mark'
  type: '@splish-me/strong'
}
/** Legacy Slate mark for emphasized (italic) text. @public */
export interface OldEmphasizeMark {
  object: 'mark'
  type: '@splish-me/em'
}
/** Legacy Slate mark carrying a palette color index. @public */
export interface OldColorMark {
  object: 'mark'
  type: '@splish-me/color'
  data: { colorIndex: number }
}
/** Legacy Slate mark for inline code. @public */
export interface OldCodeMark {
  object: 'mark'
  type: 'code'
}
/** Union of all legacy mark kinds, discriminated by `type`. @public */
export type OldMark =
  | OldStrongMark
  | OldEmphasizeMark
  | OldColorMark
  | OldCodeMark
/** Legacy Slate text node; formatting lives in the attached marks. @public */
export interface OldText {
  object: 'text'
  text: string
  marks?: OldMark[]
}
/** Legacy Slate paragraph block. @public */
export interface OldParagraphElement {
  object: 'block'
  type: 'paragraph'
  nodes: OldNode[]
}
/** Legacy Slate heading block; the level is encoded in the type name. @public */
export interface OldHeadingElement {
  object: 'block'
  type:
    | '@splish-me/h1'
    | '@splish-me/h2'
    | '@splish-me/h3'
    | '@splish-me/h4'
    | '@splish-me/h5'
    | '@splish-me/h6'
  nodes: OldNode[]
}
/** Legacy Slate hyperlink inline; the target URL lives in `data.href`. @public */
export interface OldLinkElement {
  object: 'inline'
  type: '@splish-me/a'
  data: {
    href: string
  }
  nodes: OldNode[]
}
/** Legacy Slate inline KaTeX formula (void node). @public */
export interface OldKatexInlineElement {
  object: 'inline'
  type: '@splish-me/katex-inline'
  data: {
    formula: string
    inline: true
  }
  isVoid: true
  nodes: OldNode[]
}
/** Legacy Slate block-level KaTeX formula (void node). @public */
export interface OldKatexBlockElement {
  object: 'block'
  type: '@splish-me/katex-block'
  data: {
    formula: string
    inline: false
  }
  isVoid: true
  nodes: OldNode[]
}
/** Legacy Slate ordered list block. @public */
export interface OldOrderedListElement {
  object: 'block'
  type: 'ordered-list'
  nodes: OldNode[]
}
/** Legacy Slate unordered list block. @public */
export interface OldUnorderedListElement {
  object: 'block'
  type: 'unordered-list'
  nodes: OldNode[]
}
/** Legacy Slate list item block. @public */
export interface OldListItemElement {
  object: 'block'
  type: 'list-item'
  nodes: OldNode[]
}
/** Legacy Slate content wrapper inside a list item. @public */
export interface OldListItemChildElement {
  object: 'block'
  type: 'list-item-child'
  nodes: OldNode[]
}
/** Union of all legacy element kinds, discriminated by `type`. @public */
export type OldElement =
  | OldParagraphElement
  | OldHeadingElement
  | OldLinkElement
  | OldKatexInlineElement
  | OldKatexBlockElement
  | OldOrderedListElement
  | OldUnorderedListElement
  | OldListItemElement
  | OldListItemChildElement
/** Any legacy node: either a text node or an element. @public */
export type OldNode = OldText | OldElement
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function removeLeaves(nodes: any[]) {
if (!nodes) {
return []
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment
const cleanedNodes: any[] = nodes.reduce((acc, node) => {
/* eslint-disable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-return */
if (node.leaves) {
// we don't need the node itself, as we expect it to be a text node
return [
...acc,
// eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-call
...node.leaves.map((leave: any) => ({
...leave,
object: 'text',
})),
]
} else {
const cleanedNode = node.nodes
? {
...node,
nodes: removeLeaves(node.nodes),
}
: node
return [...acc, cleanedNode]
}
/* eslint-enable @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-return */
}, [])
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return cleanedNodes
} | the_stack |
'use strict';
import update, { Context, extend, isEquals } from '.';
// End-to-end tests for the public API of immutability-helper: the default
// `update` export plus the built-in commands. Each command suite checks the
// behavior itself, that the input is never mutated, that malformed specs
// throw a descriptive error, and that structural sharing (reference
// equality) is preserved when nothing changed.
describe('immutability-helper module', () => {
  it('exports a Context class', () => {
    expect(Context).not.toBeUndefined();
    expect(new Context() instanceof Context).toBe(true);
  });
  it('exports an isEquals method', () => {
    expect(typeof isEquals).toBe('function');
  });
  it('exports an extend method', () => {
    expect(typeof extend).toBe('function');
  });
  describe('default export', () => {
    it('is a function', () => {
      expect(typeof update).toBe('function');
    });
    it('has an extend method', () => {
      expect(typeof (update as any).extend).toBe('function');
    });
  });
  describe('$push', () => {
    it('pushes', () => {
      expect(update([1], {$push: [7]})).toEqual([1, 7]);
    });
    it('does not mutate the original object', () => {
      // A frozen input would throw on any in-place write.
      const obj = Object.freeze([1]);
      expect(() => update(obj, {$push: [7]})).not.toThrow();
    });
    it('only pushes an array', () => {
      expect(() => update([], {$push: 7} as any)).toThrow(
        'update(): expected spec of $push to be an array; got 7. Did you ' +
        'forget to wrap your parameter in an array?',
      );
    });
    it('only pushes unto an array', () => {
      expect(() => update(1, {$push: 7} as any)).toThrow(
        'update(): expected target of $push to be an array; got 1.',
      );
    });
    it('keeps reference equality when possible', () => {
      const original = ['x'];
      expect(update(original, {$push: []})).toBe(original);
    });
  });
  describe('$unshift', () => {
    it('unshifts', () => {
      expect(update([1], {$unshift: [7]})).toEqual([7, 1]);
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze([1]);
      expect(() => update(obj, {$unshift: [7]})).not.toThrow();
    });
    it('only unshifts an array', () => {
      expect(() => update([], {$unshift: 7} as any)).toThrow(
        'update(): expected spec of $unshift to be an array; got 7. Did you ' +
        'forget to wrap your parameter in an array?',
      );
    });
    it('only unshifts unto an array', () => {
      expect(() => update(1, {$unshift: 7} as any)).toThrow(
        'update(): expected target of $unshift to be an array; got 1.',
      );
    });
    it('keeps reference equality when possible', () => {
      const original = ['x'];
      expect(update(original, {$unshift: []})).toBe(original);
    });
  });
  describe('$splice', () => {
    // Each inner array follows Array.prototype.splice's signature:
    // [start, deleteCount, ...itemsToInsert].
    it('splices', () => {
      expect(update([7, 8, 9], {$splice: [[2]]})).toEqual([7, 8]);
      expect(update([5, 6, 7, 8], {$splice: [[1, 2]]})).toEqual([5, 8]);
      expect(update([1, 4, 3], {$splice: [[1, 1, 2]]})).toEqual([1, 2, 3]);
      expect(update([5, 4, 9], {$splice: [[1, 1, 6, 7, 8]]})).toEqual([5, 6, 7, 8, 9]);
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze([1, 4, 3]);
      expect(() => update(obj, {$splice: [[1, 1, 2]]})).not.toThrow();
    });
    it('only splices an array of arrays', () => {
      expect(() => update([], {$splice: 1} as any)).toThrow(
        'update(): expected spec of $splice to be an array of arrays; got 1. ' +
        'Did you forget to wrap your parameters in an array?',
      );
      expect(() => update([], {$splice: [1]} as any)).toThrow(
        'update(): expected spec of $splice to be an array of arrays; got 1. ' +
        'Did you forget to wrap your parameters in an array?',
      );
    });
    it('only splices unto an array', () => {
      expect(() => update(1, {$splice: 7} as any)).toThrow(
        'Expected $splice target to be an array; got 1',
      );
    });
    it('keeps reference equality when possible', () => {
      const original = ['x'];
      expect(update(original, {$splice: [[]]} as any)).toBe(original);
    });
  });
  describe('$merge', () => {
    it('merges', () => {
      expect(update({a: 'b'}, {$merge: {c: 'd'}} as any)).toEqual({a: 'b', c: 'd'});
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze({a: 'b'});
      expect(() => update(obj, {$merge: {a: 'c'}})).not.toThrow();
    });
    it('only merges with an object', () => {
      expect(() => update({a: 'b'}, {$merge: 7} as any)).toThrow(
        'update(): $merge expects a spec of type \'object\'; got 7',
      );
    });
    it('only merges with an object', () => {
      expect(() => update(7, {$merge: {a: 'b'}} as any)).toThrow(
        'update(): $merge expects a target of type \'object\'; got 7',
      );
    });
    it('keeps reference equality when possible', () => {
      const original = {a: {b: {c: true}}};
      expect(update(original, {a: {$merge: {}}})).toBe(original);
      expect(update(original, {a: {$merge: { b: original.a.b }}})).toBe(original);
      // Merging primitives of the same value should return the original.
      expect(update(original, {a: {b: { $merge: {c: true} }}})).toBe(original);
      // Two objects are different values even though they are deeply equal.
      expect(update(original, {a: {$merge: { b: {c: true} }}})).not.toBe(original);
      expect(update(original, {
        a: {$merge: { b: original.a.b, c: false } as any},
      })).not.toBe(original);
    });
  });
  describe('$set', () => {
    it('sets', () => {
      expect(update({a: 'b'}, {$set: {c: 'd'}})).toEqual({c: 'd'});
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze({a: 'b'});
      expect(() => update(obj, {$set: {a: 'c'}})).not.toThrow();
    });
    it('keeps reference equality when possible', () => {
      const original = {a: 1};
      expect(update(original, {a: {$set: 1}})).toBe(original);
      expect(update(original, {a: {$set: 2}})).not.toBe(original);
    });
    it('only allows the single $set item', () => {
      expect(() => update({a: false}, {$set: true, bad: 123} as any)).toThrow(
        'Cannot have more than one key in an object with $set',
      );
    });
    it('setting a property to undefined should add an enumerable key to final object with value undefined', () => {
      const original = {a: 1};
      const result = update(original, {b: {$set: undefined}} as any);
      expect(result).not.toBe(original);
      expect(result).toEqual({a: 1, b: undefined});
      expect(Object.keys(result).length).toEqual(2);
    });
    it('works on Map (E2E)', () => {
      const state = new Map([['foo', 'FOO'], ['bar', 'BAR']]);
      const modified = update(state, {foo: {$set: 'OFO' }});
      expect(state).toEqual(new Map([['foo', 'FOO'], ['bar', 'BAR']]));
      expect(modified).toEqual(new Map([['foo', 'OFO'], ['bar', 'BAR']]));
      expect(state).not.toBe(modified);
    });
  });
  describe('$toggle', () => {
    it('only takes an array as spec', () => {
      expect(() => update({a: false}, {$toggle: 'a'} as any)).toThrow(
        'update(): expected spec of $toggle to be an array; got a. Did you ' +
        'forget to wrap your parameter in an array?',
      );
    });
    it('toggles false to true and true to false', () => {
      expect(update({a: false, b: true}, {$toggle: ['a', 'b']})).toEqual({a: true, b: false});
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze({a: false});
      expect(() => update(obj, {$toggle: ['a']})).not.toThrow();
    });
    it('keeps reference equality when possible', () => {
      const original = {a: false};
      expect(update(original, {$toggle: []})).toBe(original);
      expect(update(original, {$toggle: ['a']})).not.toBe(original);
    });
  });
  describe('$unset', () => {
    it('unsets', () => {
      expect(update({a: 'b'}, {$unset: ['a']}).a).toBe(undefined as any);
    });
    it('removes the key from the object', () => {
      const removed = update({a: 'b'}, {$unset: ['a']});
      expect('a' in removed).toBe(false);
    });
    it('removes multiple keys from the object', () => {
      const original = {a: 'b', c: 'd', e: 'f'};
      const removed = update(original, {$unset: ['a', 'e']});
      expect('a' in removed).toBe(false);
      expect('a' in original).toBe(true);
      expect('e' in removed).toBe(false);
      expect('e' in original).toBe(true);
    });
    it('does not remove keys from the inherited properties', () => {
      // `foo` lives on the prototype, not on the instance, so $unset
      // must leave it reachable.
      class Parent {
        constructor(public foo = 'Parent') {
        }
      }
      // tslint:disable-next-line:no-empty
      function Child() {}
      Child.prototype = new Parent();
      const child = new Child();
      expect(update(child, {$unset: ['foo']}).foo).toEqual('Parent');
    });
    it('keeps reference equality when possible', () => {
      const original = {a: 1};
      expect(update(original, {$unset: ['b']} as any)).toBe(original);
      expect(update(original, {$unset: ['a']})).not.toBe(original);
    });
  });
  describe('$add', () => {
    it('works on Map', () => {
      const state = new Map([[1, 2], [3, 4]]);
      expect(update(state, {$add: [[5, 6]]})).toMatchSnapshot();
    });
    it('preserves original object if trying to add a duplicate Map item', () => {
      const state = new Map([[1, 2]]);
      const state2 = update(state, {$add: [[1, 2]]});
      expect(state).toBe(state2);
    });
    it('works on Set', () => {
      const state = new Set([1, 2, 3, 4]);
      expect(update(state, {$add: [5, 6]})).toMatchSnapshot();
    });
    it('does not mutate the original object', () => {
      const state = Object.freeze(new Set([1, 2, 3, 4])) as Set<number>;
      expect(() => update(state, {$add: [5]})).not.toThrow();
    });
    it('throws on a non Map or Set', () => {
      expect(() => update(2, {$add: [1]} as any)).toThrow(
        'update(): $add expects a target of type Set or Map; got Number',
      );
    });
  });
  describe('$remove', () => {
    it('works on Map', () => {
      const state = new Map([[1, 2], [3, 4], [5, 6]]);
      const state2 = update(state, {$remove: [1, 5]});
      expect(state2.has(1)).toBe(false);
      expect(state2.has(3)).toBe(true);
      expect(state2.get(3)).toBe(4);
      expect(state2.has(6)).toBe(false);
    });
    it('works on Set', () => {
      const state = new Set([1, 2, 3, 4]);
      const state2 = update(state, {$remove: [2, 3]});
      expect(state2.has(1)).toBe(true);
      expect(state2.has(2)).toBe(false);
    });
    it('throws on a non Map or Set', () => {
      expect(() => update(2, {$remove: [1]} as any)).toThrow(
        'update(): $remove expects a target of type Set or Map; got Number',
      );
    });
  });
  describe('$apply', () => {
    const applier = node => ({v: node.v * 2});
    it('applies', () => {
      expect(update({v: 2}, {$apply: applier})).toEqual({v: 4});
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze({v: 2});
      expect(() => update(obj, {$apply: applier})).not.toThrow();
    });
    it('only applies a function', () => {
      expect(() => update(2, {$apply: 123} as any)).toThrow(
        'update(): expected spec of $apply to be a function; got 123.',
      );
    });
    it('keeps reference equality when possible', () => {
      const original = {a: {b: {}}};
      function identity(val) {
        return val;
      }
      expect(update(original, {a: {$apply: identity}})).toBe(original);
      expect(update(original, {a: {$apply: applier}} as any)).not.toBe(original);
    });
  });
  describe('direct apply', () => {
    // A bare function in place of a command spec behaves like $apply.
    const applier = node => ({v: node.v * 2});
    it('applies', () => {
      const doubler = value => value * 2;
      expect(update({v: 2}, applier)).toEqual({v: 4});
      expect(update(2, doubler)).toEqual(4);
    });
    it('does not mutate the original object', () => {
      const obj = Object.freeze({v: 2});
      expect(() => update(obj, applier)).not.toThrow();
    });
    it('keeps reference equality when possible', () => {
      const original = {a: {b: {}}};
      function identity(val) {
        return val;
      }
      expect(update(original, {a: identity})).toBe(original);
      expect(update(original, {a: applier} as any)).not.toBe(original);
    });
  });
  describe('deep update', () => {
    it('works', () => {
      expect(update({
        a: 'b',
        c: {
          d: 'e',
          f: [1],
          g: [2],
          h: [3],
          i: {j: 'k'},
          l: 4,
          m: 'n',
        },
      }, {
        c: {
          d: {$set: 'm'},
          f: {$push: [5]},
          g: {$unshift: [6]},
          h: {$splice: [[0, 1, 7]]},
          i: {$merge: {n: 'o'}},
          l: {$apply: x => x * 2},
          m: x => x + x,
        },
      } as any)).toEqual({
        a: 'b',
        c: {
          d: 'm',
          f: [1, 5],
          g: [6, 2],
          h: [7],
          i: {j: 'k', n: 'o'},
          l: 8,
          m: 'nn',
        },
      });
    });
    it('keeps reference equality when possible', () => {
      // Untouched branches must keep their identity; only the path to a
      // real change gets new objects.
      const original = {a: {b: 1}, c: {d: {e: 1}}};
      expect(update(original, {a: {b: {$set: 1}}})).toBe(original);
      expect(update(original, {a: {b: {$set: 1}}}).a).toBe(original.a);
      expect(update(original, {c: {d: {e: {$set: 1}}}})).toBe(original);
      expect(update(original, {c: {d: {e: {$set: 1}}}}).c).toBe(original.c);
      expect(update(original, {c: {d: {e: {$set: 1}}}}).c.d).toBe(original.c.d);
      expect(update(original, {
        a: {b: {$set: 1}},
        c: {d: {e: {$set: 1}}},
      })).toBe(original);
      expect(update(original, {
        a: {b: {$set: 1}},
        c: {d: {e: {$set: 1}}},
      }).a).toBe(original.a);
      expect(update(original, {
        a: {b: {$set: 1}},
        c: {d: {e: {$set: 1}}},
      }).c).toBe(original.c);
      expect(update(original, {
        a: {b: {$set: 1}},
        c: {d: {e: {$set: 1}}},
      }).c.d).toBe(original.c.d);
      expect(update(original, {a: {b: {$set: 2}}})).not.toBe(original);
      expect(update(original, {a: {b: {$set: 2}}}).a).not.toBe(original.a);
      expect(update(original, {a: {b: {$set: 2}}}).a.b).not.toBe(original.a.b);
      expect(update(original, {a: {b: {$set: 2}}}).c).toBe(original.c);
      expect(update(original, {a: {b: {$set: 2}}}).c.d).toBe(original.c.d);
    });
  });
  it('should accept array spec to modify arrays', () => {
    const original = {value: [{a: 0}]};
    const modified = update(original, {value: [{a: {$set: 1}}]} as any);
    expect(modified).toEqual({value: [{a: 1}]});
  });
  it('should accept object spec to modify arrays', () => {
    const original = {value: [{a: 0}]};
    const modified = update(original, {value: {0: {a: {$set: 1}}}});
    expect(modified).toEqual({value: [{a: 1}]});
  });
  it('should reject arrays except as values of specific commands', () => {
    const specs = [
      [],
      {a: []},
      {a: {$set: []}, b: [[]]},
    ];
    return Promise.all(specs.map(spec => {
      expect(() => update({a: 'b'}, spec as any)).toThrow(
        'update(): You provided an invalid spec to update(). The spec ' +
        'may not contain an array except as the value of $set, $push, ' +
        '$unshift, $splice or any custom command allowing an array value.',
      );
    }));
  });
  it('should reject non arrays from $unset', () => {
    expect(() => update({a: 'b'}, {$unset: 'a'} as any)).toThrow(
      'update(): expected spec of $unset to be an array; got a. ' +
      'Did you forget to wrap your parameter in an array?',
    );
  });
  it('should require a plain object spec containing command(s)', () => {
    const specs = [
      null,
      false,
      {a: 'c'},
      {a: {b: 'c'}},
    ];
    return Promise.all(specs.map(spec => {
      expect(() => update({a: 'b'}, spec as any)).toThrow(
        'update(): You provided an invalid spec to update(). The spec ' +
        'and every included key path must be plain objects containing one ' +
        'of the following commands: $push, $unshift, $splice, $set, $toggle, $unset, ' +
        '$add, $remove, $merge, $apply.',
      );
    }));
  });
  it('should perform safe hasOwnProperty check', () => {
    // Keys that shadow Object.prototype members must not break the walker.
    expect(update({}, {hasOwnProperty: {$set: 'a'}})).toEqual({
      hasOwnProperty: 'a',
    });
  });
});
// Tests for isolated Context instances: extending with custom commands,
// symbol-keyed properties, prototype handling, Map support, and the
// isEquals escape hatch. A fresh Context per test keeps extensions from
// leaking between cases.
describe('new Context()', () => {
  let myContext: Context;
  beforeEach(() => {
    myContext = new Context();
  });
  it('has an update method', () => {
    expect(typeof myContext.update).toBe('function');
  });
  it('has an extend method that matches a (deprecated) update.extend', () => {
    expect(myContext.extend).toBe((myContext.update as any).extend);
  });
  it('has an isEquals method that calls a (deprecated) update.isEquals', () => {
    const spy = jest.spyOn(myContext.update as any, 'isEquals');
    myContext.isEquals('foo', 'foo');
    expect(spy).toHaveBeenCalled();
  });
  describe('can extend functionality', () => {
    it('allows adding new directives', () => {
      myContext.extend<number>('$addtax', (tax, original) => {
        return original + (tax * original);
      });
      expect(myContext.update(5, {$addtax: 0.10} as any)).toEqual(5.5);
    });
    it('gets the original object (so be careful about mutations)', () => {
      const obj = {};
      let passedOriginal: any;
      myContext.extend<any>('$foobar', (_prop, original) => {
        passedOriginal = original;
      });
      myContext.update(obj, {$foobar: null});
      expect(obj).toBe(passedOriginal);
    });
    it('doesn\'t touch the original update', () => {
      // An extension registered on one Context must not register the
      // command on the module-level update function.
      myContext.extend<number>('$addtax', (tax, original) => {
        return original + (tax * original);
      });
      expect(() => update({$addtax: 0.10}, {$addtax: 0.10} as any)).toThrow();
      expect(() => myContext.update({$addtax: 0.10}, {$addtax: 0.10} as any)).not.toThrow();
    });
  });
  it('can handle nibling directives', () => {
    // A command ($merge) can sit next to a nested key path in the same spec.
    const obj = {a: [1, 2, 3], b: 'me'};
    const spec = {
      a: {$splice: [[0, 2]]},
      // tslint:disable-next-line:object-literal-sort-keys
      $merge: {b: 'you'},
    };
    expect(update(obj, spec)).toEqual({a: [3], b: 'you'});
  });
  // Only run the symbol suite where native Symbol support exists.
  if (typeof Symbol === 'function' && Symbol('TEST').toString() === 'Symbol(TEST)') {
    describe('works with symbols', () => {
      it('in the source object', () => {
        const obj = {a: 1};
        obj[Symbol.for('b')] = 2;
        expect(update(obj, {c: {$set: 3}} as any)[Symbol.for('b')]).toEqual(2);
      });
      it('in the spec object', () => {
        const obj = {a: 1};
        obj[Symbol.for('b')] = 2;
        const spec = {
          [Symbol.for('b')]: {$set: 2},
        };
        expect(update(obj, spec)[Symbol.for('b')]).toEqual(2);
      });
      it('in the $merge command', () => {
        const obj = {
          a: 1,
          [Symbol.for('b')]: {c: 3},
          [Symbol.for('d')]: 4,
        };
        const spec = {
          [Symbol.for('b')]: { $merge: {} },
        };
        spec[Symbol.for('b') as any].$merge[Symbol.for('e')] = 5;
        const updated = update(obj, spec);
        expect(updated[Symbol.for('b') as any][Symbol.for('e')]).toEqual(5);
        expect(updated[Symbol.for('d') as any]).toEqual(4);
      });
    });
  }
  it('supports objects without prototypes', () => {
    const obj = Object.create(null);
    expect(() => update(obj, {$merge: {a: 'b'}})).not.toThrow();
  });
  it('supports objects with prototypes', () => {
    const proto = { a: 'Proto' };
    const obj = Object.create(proto);
    expect(update(obj, { $merge: { b: 'Obj' } }).a).toEqual('Proto');
  });
  it('supports an escape hatch for isEquals', () => {
    // With deep-equality comparison, deeply-equal replacements keep the
    // original reference at every level.
    myContext.isEquals = (x, y) => {
      return JSON.stringify(x) === JSON.stringify(y);
    };
    expect(myContext.isEquals).toBe((myContext.update as any).isEquals);
    const a = {b: {c: {d: [4, 5]}}};
    const b = myContext.update(a, {b: {c: {d: {$set: [4, 5]}}}});
    const c = myContext.update(a, {b: {$set: {c: {d: [4, 5]}}}});
    const d = myContext.update(a, {$set: {b: {c: {d: [4, 5]}}}});
    expect(a).toBe(b);
    expect(a).toBe(c);
    expect(a).toBe(d);
  });
  it('supports an escape hatch for isEqual for shallow direct apply', () => {
    myContext.isEquals = (x, y) => {
      return JSON.stringify(x) === JSON.stringify(y);
    };
    expect(myContext.isEquals).toBe((myContext.update as any).isEquals);
    const a = { b: 1 };
    const b = myContext.update(a, () => ({ b: 1 }));
    expect(a).toBe(b);
  });
  it('does not lose non integer keys of an array', () => {
    interface IHasTop {
      top: number;
    }
    const state = { items: [
      { name: 'Superman', strength: 1000 },
      { name: 'Jim', strength: 2 },
    ] };
    (state.items as any as IHasTop).top = 0;
    const state2 = update(state, { items: { 1: { strength: { $set: 3 } } } });
    expect((state2.items as any as IHasTop).top).toBe(0);
  });
  it('supports Maps', () => {
    const state = new Map([
      ['mapKey', 'mapValue'],
    ]);
    const updatedState = update(state, {
      ['mapKey']: {$set: 'updatedMapValue' },
    } as any);
    expect(updatedState).toEqual(
      new Map([
        ['mapKey', 'updatedMapValue'],
      ]),
    );
  });
  it('supports nested objects inside Maps', () => {
    const state = new Map([
      ['mapKey', { banana: 'yellow', apple: ['red'], blueberry: 'purple' }],
    ]);
    const updatedState = update(state, {
      ['mapKey']: { apple: { $set: ['green', 'red'] } },
    } as any);
    expect(updatedState).toEqual(
      new Map([
        [
          'mapKey',
          { banana: 'yellow', apple: ['green', 'red'], blueberry: 'purple' },
        ],
      ]),
    );
  });
  it('supports Maps and keeps reference equality when possible', () => {
    const original = new Map([['a', { b: 1 }]]);
    expect(update(original, { a: { $merge: {} } } as any)).toBe(original);
    expect(update(original, { a: { $merge: { c: 2 } } } as any)).not.toBe(original);
  });
});
// Legacy surface area of the default export.
describe('update', () => {
  it('has a (deprecated) newContext method', () => {
    const created = (update as any).newContext();
    expect(typeof created).toBe('function');
  });
});
describe('works with readonly arrays', () => {
interface Thing {
bar: {
foo: ReadonlyArray<{ baz: number }>;
};
}
const a: Thing = {
bar: { foo: [ {baz: 1} ] }
};
const b: Thing = {
bar: { foo: [ {baz: 2} ] }
};
expect(update(a, {
bar: {
foo: { $push: b.bar.foo }
}
})).toEqual({
bar: { foo: [{ baz: 1 }, { baz: 2 }]}
});
}); | the_stack |
import { createElement } from '@syncfusion/ej2-base';
import { Diagram } from '../../../src/diagram/diagram';
import { NodeModel } from '../../../src/diagram/objects/node-model';
import { ShapeAnnotationModel } from '../../../src/diagram/objects/annotation-model';
import { TextStyleModel } from '../../../src/diagram/core/appearance-model';
import { MouseEvents } from '../../../spec/diagram/interaction/mouseevents.spec'
import {profile , inMB, getMemoryProfile} from '../../../spec/common.spec';
/**
* Annotations - Changing Margin
*/
describe('Diagram Control', () => {
describe('Annotations with margin', () => {
let diagram: Diagram;
let ele: HTMLElement;
let mouseEvents: MouseEvents = new MouseEvents();
let annotation: ShapeAnnotationModel;
let node: NodeModel;
let node2: NodeModel;
let node3: NodeModel;
let node4: NodeModel;
let style: TextStyleModel = { strokeColor: 'black', opacity: 0.5, strokeWidth: 1 };
let pathData: string = 'M540.3643,137.9336L546.7973,159.7016L570.3633,159.7296L550.7723,171.9366L558.9053,' +
'194.9966L540.3643,179.4996L521.8223,194.9966L529.9553,171.9366' +
'L510.3633,159.7296L533.9313,159.7016L540.3643,137.9336z';
// Setup: builds three star-path nodes plus one image node, each with a
// differently-aligned annotation, and renders them on a canvas-mode diagram.
beforeAll((): void => {
    const isDef = (o: any) => o !== undefined && o !== null;
    if (!isDef(window.performance)) {
        // BUG FIX: the previous code called `this.skip()` here. This callback is
        // an arrow function, so `this` is the lexical (module) scope and Jasmine
        // has no Mocha-style `this.skip()` — the call threw a TypeError instead
        // of skipping. Logging and returning early is the safe equivalent.
        console.log("Unsupported environment, window.performance.memory is unavailable");
        return;
    }
    ele = createElement('div', { id: 'diagram48' });
    document.body.appendChild(ele);
    // Node 1: annotation pinned to the top-left with explicit margins.
    node = {
        id: 'node', width: 100, height: 100, offsetX: 100, offsetY: 100,
        shape: {
            type: 'Path', data: pathData
        }
    };
    annotation = {
        content: 'top left margin, top left alignment, offset 0', offset: { x: 0, y: 0 },
        horizontalAlignment: 'Left', verticalAlignment: 'Top', margin: { left: 15, top: 10 },
        style: style, width: 85
    };
    node.annotations = [annotation];
    // Node 2: annotation pinned to the bottom-right corner.
    node2 = {
        id: 'node2', width: 100, height: 100, offsetX: 300, offsetY: 100,
        shape: { type: 'Path', data: pathData }
    };
    annotation = {
        content: 'bottom right margin bottom right alignment, offset 1', offset: { x: 1, y: 1 },
        horizontalAlignment: 'Right', verticalAlignment: 'Bottom',
        margin: { right: 15, bottom: 15 },
        style: style, width: 85
    };
    node2.annotations = [annotation];
    // Node 4: image node with an empty source; node 3: star path with a
    // top-margin annotation (declared together via a comma expression).
    node4 = {
        id: 'node4', width: 50, height: 50, offsetX: 700, offsetY: 100, style: { fill: 'none' },
        shape: { type: 'Image', source: '' }
    },
    node3 = {
        id: 'node3', width: 100, height: 100, offsetX: 500, offsetY: 100,
        shape: {
            type: 'Path', data: 'M540.3643,137.9336L546.7973,159.7016L570.3633,159.7296L550.7723,171.9366L558.9053,' +
                '194.9966L540.3643,179.4996L521.8223,194.9966L529.9553,171.9366' +
                'L510.3633,159.7296L533.9313,159.7016L540.3643,137.9336z'
        }
    };
    annotation = {
        content: 'top margin, offset 0, top left alignment', offset: { x: 0, y: 0 },
        horizontalAlignment: 'Left', verticalAlignment: 'Top', margin: { top: 15 },
        style: style
    };
    node3.annotations = [annotation];
    // Node 6: flow-shape card with whitespace-preserving annotation text.
    let node6: NodeModel = {
        id: 'Decision', width: 150, height: 60, offsetX: 550, offsetY: 60,
        shape: { type: 'Flow', shape: 'Card' },
        annotations: [{
            id: 'label6', content: 'Decision Process for new software ideas', offset: { x: 0.5, y: 0.5 },
            style: { whiteSpace: 'PreserveAll' } as TextStyleModel
        }]
    };
    diagram = new Diagram({
        mode: 'Canvas',
        width: '800px', height: '800px',
        nodes: [node, node2, node3, node6, node4]
    });
    diagram.appendTo('#diagram48');
});
// Teardown: dispose the diagram and detach its host element from the DOM.
afterAll((): void => {
    diagram.destroy();
    ele.remove();
});
// Verifies the rendered size and position of the first three nodes' labels
// for their respective margin + alignment combinations.
it('Checking annotation margin', (done: Function) => {
    // NOTE(review): `wrapper` is assigned and never read — these reads appear
    // to exist only to touch each node's wrapper before asserting.
    let wrapper = (diagram.nodes[0] as NodeModel).wrapper;
    wrapper = (diagram.nodes[1] as NodeModel).wrapper;
    wrapper = (diagram.nodes[2] as NodeModel).wrapper;
    //first label
    expect(((Math.ceil((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.width) === 80 || Math.floor((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.width) === 79)
        && (Math.floor((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.height) === 57||Math.ceil((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.height) === 58) &&
        (((diagram.nodes[0] as NodeModel).wrapper.children[1].offsetX === 104.6796875)||(Math.floor((diagram.nodes[0] as NodeModel).wrapper.children[1].offsetX)===104)) &&
        (diagram.nodes[0] as NodeModel).wrapper.children[1].offsetY === 88.8) &&
        //second label
        ((Math.ceil((diagram.nodes[1] as NodeModel).wrapper.children[1].actualSize.width) === 83 || Math.floor((diagram.nodes[1] as NodeModel).wrapper.children[1].actualSize.width) === 82)
            && (diagram.nodes[1] as NodeModel).wrapper.children[1].actualSize.height === 57.599999999999994 &&
            (((diagram.nodes[1] as NodeModel).wrapper.children[1].offsetX === 293.984375)||(Math.floor((diagram.nodes[1] as NodeModel).wrapper.children[1].offsetX) === 293)) &&
            (diagram.nodes[1] as NodeModel).wrapper.children[1].offsetY === 106.2) &&
        //third label
        ((Math.ceil((diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.width) === 94 || Math.floor((diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.width) === 93)
            && (diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.height === 43.199999999999996 &&
            ( ((diagram.nodes[2] as NodeModel).wrapper.children[1].offsetX === 496.578125)||(Math.floor((diagram.nodes[2] as NodeModel).wrapper.children[1].offsetX )=== 496))&&
            (diagram.nodes[2] as NodeModel).wrapper.children[1].offsetY === 86.6)
    ).toBe(true);
    done();
});
// Mutates each node's annotation (content, offset, alignment, margin) at
// runtime, re-binds, and re-asserts label geometry.
it('Checking annotation margin', (done: Function) => {
    (diagram.nodes[0] as NodeModel).annotations[0].content = 'left margin, top left alignment, offset 0';
    (diagram.nodes[0] as NodeModel).annotations[0].offset = { x: 0, y: 0 };
    (diagram.nodes[0] as NodeModel).annotations[0].horizontalAlignment = 'Left';
    (diagram.nodes[0] as NodeModel).annotations[0].verticalAlignment = 'Top';
    (diagram.nodes[0] as NodeModel).annotations[0].margin = { top: 10, left: 0 };
    (diagram.nodes[0] as NodeModel).annotations[0].width = 100;
    (diagram.nodes[1] as NodeModel).annotations[0].content = 'right margin, bottom right alignment, offset 1';
    (diagram.nodes[1] as NodeModel).annotations[0].offset = { x: 1, y: 1 };
    (diagram.nodes[1] as NodeModel).annotations[0].horizontalAlignment = 'Right';
    (diagram.nodes[1] as NodeModel).annotations[0].verticalAlignment = 'Bottom';
    (diagram.nodes[1] as NodeModel).annotations[0].margin = { right: 15, bottom: 0 };
    (diagram.nodes[1] as NodeModel).annotations[0].width = 85;
    (diagram.nodes[2] as NodeModel).annotations[0].content = 'bottom margin, bottom right alignment, offset 1';
    (diagram.nodes[2] as NodeModel).annotations[0].offset = { x: 1, y: 1 };
    (diagram.nodes[2] as NodeModel).annotations[0].horizontalAlignment = 'Right';
    (diagram.nodes[2] as NodeModel).annotations[0].verticalAlignment = 'Bottom';
    (diagram.nodes[2] as NodeModel).annotations[0].margin = { bottom: 15, top: 0 };
    diagram.dataBind();
    // NOTE(review): `wrapper` is assigned and never read below.
    let wrapper = (diagram.nodes[0] as NodeModel).wrapper;
    wrapper = (diagram.nodes[1] as NodeModel).wrapper;
    wrapper = (diagram.nodes[2] as NodeModel).wrapper;
    //first label
    // NOTE(review): clauses of the form `x >= A || x <= B` (with A < B) below
    // are tautologies — every number satisfies at least one side, so they can
    // never fail. The intended operator was most likely `&&`. Left unchanged
    // here to avoid altering test outcomes.
    expect(((Math.ceil((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.width) === 100 || Math.ceil((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.width) === 99 || Math.ceil((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.width) >= 77 || Math.ceil((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.width) <= 79 )
        && ((diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.height === 28.799999999999997 || (diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.height >=42 || (diagram.nodes[0] as NodeModel).wrapper.children[1].actualSize.height <= 46) &&
        (((diagram.nodes[0] as NodeModel).wrapper.children[1].offsetX === 99.6875)||(Math.floor((diagram.nodes[0] as NodeModel).wrapper.children[1].offsetX )=== 99) || (Math.floor((diagram.nodes[0] as NodeModel).wrapper.children[1].offsetX )>=88) || (Math.floor((diagram.nodes[0] as NodeModel).wrapper.children[1].offsetX)<= 90)) &&
        (diagram.nodes[0] as NodeModel).wrapper.children[1].offsetY === 74.4 || (diagram.nodes[0] as NodeModel).wrapper.children[1].offsetY >= 72 || (diagram.nodes[0] as NodeModel).wrapper.children[1].offsetY <= 76) &&
        //second label
        ((Math.ceil((diagram.nodes[1] as NodeModel).wrapper.children[1].actualSize.width) === 67 || Math.floor((diagram.nodes[1] as NodeModel).wrapper.children[1].actualSize.width) === 66)
            && (diagram.nodes[1] as NodeModel).wrapper.children[1].actualSize.height === 57.599999999999994 &&
            (((diagram.nodes[1] as NodeModel).wrapper.children[1].offsetX === 301.65625)||(Math.floor((diagram.nodes[1] as NodeModel).wrapper.children[1].offsetX) === 301)) &&
            (diagram.nodes[1] as NodeModel).wrapper.children[1].offsetY === 121.2) &&
        //third label
        ((Math.floor((diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.width) === 97 ||
            Math.ceil((diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.width) === 98||Math.floor((diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.width) === 98)
            && (diagram.nodes[2] as NodeModel).wrapper.children[1].actualSize.height === 43.199999999999996 &&
            Math.round((diagram.nodes[2] as NodeModel).wrapper.children[1].offsetX) === 501 &&
            (diagram.nodes[2] as NodeModel).wrapper.children[1].offsetY === 113.4)
    ).toBe(true);
    done();
});
// Guards against leaks: memory growth across samples must stay bounded.
it('memory leak', () => {
    profile.sample();
    let average: any = inMB(profile.averageChange)
    //Check average change in memory samples to not be over 10MB
    expect(average).toBeLessThan(10);
    let memory: any = inMB(getMemoryProfile())
    //Check the final memory usage against the first usage, there should be little change if everything was properly deallocated
    expect(memory).toBeLessThan(profile.samples[0] + 0.25);
})
});
}); | the_stack |
import React, { useState, useEffect, useRef } from 'react'
import { useMutation, gql } from '@apollo/client'
import Checkbox from '@mui/material/Checkbox'
import DialogContentText from '@mui/material/DialogContentText'
import FormControlLabel from '@mui/material/FormControlLabel'
import FormHelperText from '@mui/material/FormHelperText'
import Grid from '@mui/material/Grid'
import Typography from '@mui/material/Typography'
import makeStyles from '@mui/styles/makeStyles'
import Alert from '@mui/material/Alert'
import AlertTitle from '@mui/material/AlertTitle'
import _ from 'lodash'
import { DateTime, Interval } from 'luxon'
import { fieldErrors, nonFieldErrors } from '../../util/errutil'
import FormDialog from '../../dialogs/FormDialog'
import { contentText, dtToDuration, Shift, TempSchedValue } from './sharedUtils'
import { FormContainer, FormField } from '../../forms'
import TempSchedAddNewShift from './TempSchedAddNewShift'
import { isISOAfter, parseInterval } from '../../util/shifts'
import { getNextWeekday } from '../../util/luxon-helpers'
import { useScheduleTZ } from './hooks'
import TempSchedShiftsList from './TempSchedShiftsList'
import { ISODateTimePicker } from '../../util/ISOPickers'
import { getCoverageGapItems } from './shiftsListUtil'
import { fmtLocal } from '../../util/timeFormat'
import { theme } from '../../mui'
// GraphQL mutation: replaces the schedule's temporary-schedule window with
// the shifts described by $input (see SetTemporaryScheduleInput).
const mutation = gql`
  mutation ($input: SetTemporaryScheduleInput!) {
    setTemporarySchedule(input: $input)
  }
`
/**
 * Two shifts are considered identical when their start, end, and assigned
 * user all match; any `displayStart` field is intentionally ignored.
 */
function shiftEquals(a: Shift, b: Shift): boolean {
  const sameUser = a.userID === b.userID
  const sameWindow = a.start === b.start && a.end === b.end
  return sameUser && sameWindow
}
// JSS styles for the dialog; `theme` supplies palette and breakpoints.
const useStyles = makeStyles<typeof theme>((theme) => ({
  contentText,
  avatar: {
    backgroundColor: theme.palette.primary.main,
  },
  formContainer: {
    height: '100%',
  },
  noCoverageError: {
    marginTop: '.5rem',
    marginBottom: '.5rem',
  },
  rightPane: {
    // stacked under the left pane on small screens, beside it on md and up
    [theme.breakpoints.down('md')]: {
      marginTop: '1rem',
    },
    [theme.breakpoints.up('md')]: {
      paddingLeft: '1rem',
    },
    overflow: 'hidden',
  },
  sticky: {
    // keeps the add-shift form visible while the shift list scrolls
    position: 'sticky',
    top: 0,
  },
  tzNote: {
    fontStyle: 'italic',
  },
}))
// Props for TempSchedDialog.
type TempScheduleDialogProps = {
  // Invoked when the dialog is dismissed, and after a successful submit.
  onClose: () => void
  // ID of the schedule the temporary schedule applies to.
  scheduleID: string
  // Existing temp-sched values when editing; empty when creating a new one.
  value: Partial<TempSchedValue>
}
// Clamps an ISO timestamp so it is never earlier than `nowISO`: returns
// `iso` unchanged when it is at/after now, `nowISO` (re-serialized) when it
// falls in the past, and '' when `iso` is missing.
const clampForward = (nowISO: string, iso: string | undefined): string => {
  if (!iso) return ''
  const now = DateTime.fromISO(nowISO)
  return DateTime.fromISO(iso) < now ? now.toISO() : iso
}
/**
 * Dialog for defining a temporary schedule: a fixed time window during which
 * the configured shifts replace all normal assignments and overrides.
 * Submits via the setTemporarySchedule mutation and closes on success.
 */
export default function TempSchedDialog({
  onClose,
  scheduleID,
  value: _value,
}: TempScheduleDialogProps): JSX.Element {
  const classes = useStyles()
  // A non-empty incoming value means we are editing an existing temp sched.
  const edit = !_.isEmpty(_value)
  const { q, zone, isLocalZone } = useScheduleTZ(scheduleID)
  // "now" is frozen (to the minute) for the dialog's lifetime so all of the
  // clamping/filtering below evaluates against a stable reference point.
  const [now] = useState(DateTime.utc().startOf('minute').toISO())
  const [showForm, setShowForm] = useState(false)
  const [value, setValue] = useState({
    start: clampForward(now, _value?.start),
    end: _value?.end ?? '',
    clearStart: _value?.start ?? null,
    clearEnd: _value?.end ?? null,
    shifts: (_value?.shifts ?? [])
      .map((s) => _.pick(s, 'start', 'end', 'userID', 'displayStart'))
      // NOTE(review): this filter always returns true — it is used purely to
      // mutate still-active shifts in place (remember the original start in
      // displayStart, clamp start forward to now); no shift is dropped here.
      .filter((s) => {
        if (DateTime.fromISO(s.end) > DateTime.fromISO(now)) {
          s.displayStart = s.start
          s.start = clampForward(now, s.start)
        }
        return true
      }),
  })
  // The shift currently being composed in the add-shift form, if any.
  const [shift, setShift] = useState<Shift | null>(null)
  const [allowNoCoverage, setAllowNoCoverage] = useState(false)
  const [hasSubmitted, setHasSubmitted] = useState(false)
  useEffect(() => {
    // set default start, end times when zone is ready
    if (!value.start && !value.end && !q.loading && zone) {
      const nextMonday = getNextWeekday(1, DateTime.now(), zone)
      const nextFriday = nextMonday.plus({ days: 5 }) // thru to the end of Friday
      setValue({
        ...value,
        start: nextMonday.toISO(),
        end: nextFriday.toISO(),
      })
    }
  }, [q.loading, zone])
  // Returns an Error when the overall schedule window is invalid, else null.
  function validate(): Error | null {
    if (isISOAfter(value.start, value.end)) {
      return new Error('Start date/time cannot be after end date/time.')
    }
    return null
  }
  // True when any still-active shift extends outside the schedule window.
  const hasInvalidShift = (() => {
    if (q.loading) return false
    const schedInterval = parseInterval(value, zone)
    return value.shifts.some(
      (s) =>
        DateTime.fromISO(s.end) > DateTime.fromISO(now) &&
        !schedInterval.engulfs(parseInterval(s, zone)),
    )
  })()
  const shiftErrors = hasInvalidShift
    ? [
        {
          message:
            'One or more shifts extend beyond the start and/or end of this temporary schedule',
        },
      ]
    : []
  // Clicking a coverage-gap row opens the shift form pre-filled with a shift
  // starting at the gap, preserving the previously entered duration.
  function handleCoverageGapClick(coverageGap: Interval): void {
    if (!showForm) setShowForm(true)
    // make sure duration remains the same (evaluated off of the end timestamp)
    const startDT = DateTime.fromISO(shift?.start ?? '', { zone })
    const endDT = DateTime.fromISO(shift?.end ?? '', { zone })
    const duration = dtToDuration(startDT, endDT)
    const nextStart = coverageGap?.start
    // assumes dtToDuration yields a count of hours — TODO confirm in sharedUtils
    const nextEnd = nextStart.plus({ hours: duration })
    setShift({
      userID: shift?.userID ?? '',
      start: nextStart.toISO(),
      end: nextEnd.toISO(),
    })
  }
  // True when the shifts leave any part of the window uncovered.
  const hasCoverageGaps = (() => {
    if (q.loading) return false
    const schedInterval = parseInterval(value, zone)
    return (
      getCoverageGapItems(
        schedInterval,
        value.shifts,
        zone,
        handleCoverageGapClick,
      ).length > 0
    )
  })()
  const [submit, { loading, error }] = useMutation(mutation, {
    onCompleted: () => onClose(),
    variables: {
      input: {
        start: value.start,
        end: value.end,
        clearStart: value.clearStart,
        clearEnd: value.clearEnd,
        shifts: value.shifts
          .map((s) => _.pick(s, 'start', 'end', 'userID'))
          .filter((s) => {
            // clamp/filter out shifts that are in the past
            if (DateTime.fromISO(s.end) <= DateTime.fromISO(now)) {
              return false
            }
            s.start = clampForward(now, s.start)
            return true
          }),
        scheduleID,
      },
    },
  })
  const shiftListRef = useRef<HTMLDivElement | null>(null)
  // Blocks submission while there are unacknowledged coverage gaps, scrolling
  // the gap error into view; otherwise fires the mutation.
  const handleSubmit = (): void => {
    setHasSubmitted(true)
    if (hasCoverageGaps && !allowNoCoverage) {
      // Scroll to show gap in coverage error on top of shift list
      if (shiftListRef?.current) {
        shiftListRef.current.scrollIntoView({ behavior: 'smooth' })
      }
      return
    }
    submit()
  }
  const nonFieldErrs = nonFieldErrors(error).map((e) => ({
    message: e.message,
  }))
  const fieldErrs = fieldErrors(error).map((e) => ({
    message: `${e.field}: ${e.message}`,
  }))
  // Coverage-gap error is surfaced only after a submit attempt and while the
  // user has not explicitly allowed gaps.
  const noCoverageErrs =
    hasSubmitted && hasCoverageGaps && !allowNoCoverage
      ? [new Error('This temporary schedule has gaps in coverage.')]
      : []
  const errs = nonFieldErrs
    .concat(fieldErrs)
    .concat(shiftErrors)
    .concat(noCoverageErrs)
  return (
    <FormDialog
      fullHeight
      maxWidth='lg'
      title='Define a Temporary Schedule'
      onClose={onClose}
      loading={loading}
      errors={errs}
      notices={
        // warn (except when editing) if the chosen start is over an hour old
        !value.start ||
        DateTime.fromISO(value.start, { zone }) >
          DateTime.utc().minus({ hour: 1 }) ||
        edit
          ? []
          : [
              {
                type: 'WARNING',
                message: 'Start time occurs in the past',
                details:
                  'Any shifts or changes made to shifts in the past will be ignored when submitting.',
              },
            ]
      }
      form={
        <FormContainer
          optionalLabels
          disabled={loading}
          value={value}
          onChange={(newValue: TempSchedValue) =>
            setValue({ ...value, ...newValue })
          }
        >
          <Grid
            container
            className={classes.formContainer}
            justifyContent='space-between'
          >
            {/* left pane */}
            <Grid
              item
              xs={12}
              md={6}
              container
              alignContent='flex-start'
              spacing={2}
            >
              <Grid item xs={12}>
                <DialogContentText className={classes.contentText}>
                  The schedule will be exactly as configured here for the entire
                  duration (ignoring all assignments and overrides).
                </DialogContentText>
              </Grid>
              <Grid item xs={12}>
                <Typography color='textSecondary' className={classes.tzNote}>
                  Configuring in {zone}
                </Typography>
              </Grid>
              <Grid item xs={12} md={6}>
                <FormField
                  fullWidth
                  component={ISODateTimePicker}
                  required
                  name='start'
                  label='Schedule Start'
                  min={now}
                  max={DateTime.fromISO(now, { zone })
                    .plus({ year: 1 })
                    .toISO()}
                  validate={() => validate()}
                  timeZone={zone}
                  disabled={q.loading}
                  hint={isLocalZone ? '' : fmtLocal(value.start)}
                />
              </Grid>
              <Grid item xs={12} md={6}>
                <FormField
                  fullWidth
                  component={ISODateTimePicker}
                  required
                  name='end'
                  label='Schedule End'
                  min={value.start}
                  max={DateTime.fromISO(value.start, { zone })
                    .plus({ month: 3 })
                    .toISO()}
                  validate={() => validate()}
                  timeZone={zone}
                  disabled={q.loading}
                  hint={isLocalZone ? '' : fmtLocal(value.end)}
                />
              </Grid>
              <Grid item xs={12} className={classes.sticky}>
                <TempSchedAddNewShift
                  value={value}
                  onChange={(shifts: Shift[]) => setValue({ ...value, shifts })}
                  scheduleID={scheduleID}
                  edit={edit}
                  showForm={showForm}
                  setShowForm={setShowForm}
                  shift={shift}
                  setShift={setShift}
                />
              </Grid>
            </Grid>
            {/* right pane */}
            <Grid
              item
              xs={12}
              md={6}
              container
              spacing={2}
              className={classes.rightPane}
            >
              <Grid item xs={12} ref={shiftListRef}>
                <Typography variant='subtitle1' component='h3'>
                  Shifts
                </Typography>
                {hasSubmitted && hasCoverageGaps && (
                  <Alert severity='error' className={classes.noCoverageError}>
                    <AlertTitle>Gaps in coverage</AlertTitle>
                    <FormHelperText>
                      There are gaps in coverage. During these gaps, nobody on
                      the schedule will receive alerts. If you still want to
                      proceed, check the box below and retry.
                    </FormHelperText>
                    <FormControlLabel
                      label='Allow gaps in coverage'
                      labelPlacement='end'
                      control={
                        <Checkbox
                          data-cy='no-coverage-checkbox'
                          checked={allowNoCoverage}
                          onChange={(e) => setAllowNoCoverage(e.target.checked)}
                          name='allowCoverageGaps'
                        />
                      }
                    />
                  </Alert>
                )}
                <TempSchedShiftsList
                  scheduleID={scheduleID}
                  value={value.shifts}
                  start={value.start}
                  end={value.end}
                  onRemove={(shift: Shift) => {
                    setValue({
                      ...value,
                      shifts: value.shifts.filter(
                        (s) => !shiftEquals(shift, s),
                      ),
                    })
                  }}
                  edit={edit}
                  handleCoverageGapClick={handleCoverageGapClick}
                />
              </Grid>
            </Grid>
          </Grid>
        </FormContainer>
      }
      onSubmit={handleSubmit}
    />
  )
}
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs, enums } from "../types";
import * as utilities from "../utilities";
import {Application, ApplicationVersion} from "./index";
/**
* Provides an Elastic Beanstalk Environment Resource. Elastic Beanstalk allows
* you to deploy and manage applications in the AWS cloud without worrying about
* the infrastructure that runs those applications.
*
* Environments are often things such as `development`, `integration`, or
* `production`.
*
* ## Example Usage
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as aws from "@pulumi/aws";
*
* const tftest = new aws.elasticbeanstalk.Application("tftest", {description: "tf-test-desc"});
* const tfenvtest = new aws.elasticbeanstalk.Environment("tfenvtest", {
* application: tftest.name,
* solutionStackName: "64bit Amazon Linux 2015.03 v2.0.3 running Go 1.4",
* });
* ```
* ## Option Settings
*
* Some options can be stack-specific, check [AWS Docs](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html)
* for supported options and examples.
*
* The `setting` and `allSettings` mappings support the following format:
*
* * `namespace` - unique namespace identifying the option's associated AWS resource
* * `name` - name of the configuration option
* * `value` - value for the configuration option
* * `resource` - (Optional) resource name for [scheduled action](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/command-options-general.html#command-options-general-autoscalingscheduledaction)
*
* ### Example With Options
*
* ```typescript
* import * as pulumi from "@pulumi/pulumi";
* import * as aws from "@pulumi/aws";
*
* const tftest = new aws.elasticbeanstalk.Application("tftest", {description: "tf-test-desc"});
* const tfenvtest = new aws.elasticbeanstalk.Environment("tfenvtest", {
* application: tftest.name,
* solutionStackName: "64bit Amazon Linux 2015.03 v2.0.3 running Go 1.4",
* settings: [
* {
* namespace: "aws:ec2:vpc",
* name: "VPCId",
* value: "vpc-xxxxxxxx",
* },
* {
* namespace: "aws:ec2:vpc",
* name: "Subnets",
* value: "subnet-xxxxxxxx",
* },
* ],
* });
* ```
*
* ## Import
*
* Elastic Beanstalk Environments can be imported using the `id`, e.g.
*
* ```sh
* $ pulumi import aws:elasticbeanstalk/environment:Environment prodenv e-rpqsewtp2j
* ```
*/
export class Environment extends pulumi.CustomResource {
    /**
     * Get an existing Environment resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    public static get(name: string, id: pulumi.Input<pulumi.ID>, state?: EnvironmentState, opts?: pulumi.CustomResourceOptions): Environment {
        return new Environment(name, <any>state, { ...opts, id });
    }

    /** @internal */
    public static readonly __pulumiType = 'aws:elasticbeanstalk/environment:Environment';

    /**
     * Returns true if the given object is an instance of Environment. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    public static isInstance(obj: any): obj is Environment {
        // `!= null` rejects both null and undefined before the tag check.
        return obj != null && obj['__pulumiType'] === Environment.__pulumiType;
    }

    /**
     * List of all option settings configured in this Environment. These
     * are a combination of default settings and their overrides from `setting` in
     * the configuration.
     */
    public /*out*/ readonly allSettings!: pulumi.Output<outputs.elasticbeanstalk.EnvironmentAllSetting[]>;
    /**
     * Name of the application that contains the version
     * to be deployed
     */
    public readonly application!: pulumi.Output<string>;
    public /*out*/ readonly arn!: pulumi.Output<string>;
    /**
     * The autoscaling groups used by this Environment.
     */
    public /*out*/ readonly autoscalingGroups!: pulumi.Output<string[]>;
    /**
     * Fully qualified DNS name for this Environment.
     */
    public /*out*/ readonly cname!: pulumi.Output<string>;
    /**
     * Prefix to use for the fully qualified DNS name of
     * the Environment.
     */
    public readonly cnamePrefix!: pulumi.Output<string>;
    /**
     * Short description of the Environment
     */
    public readonly description!: pulumi.Output<string | undefined>;
    /**
     * The URL to the Load Balancer for this Environment
     */
    public /*out*/ readonly endpointUrl!: pulumi.Output<string>;
    /**
     * Instances used by this Environment.
     */
    public /*out*/ readonly instances!: pulumi.Output<string[]>;
    /**
     * Launch configurations in use by this Environment.
     */
    public /*out*/ readonly launchConfigurations!: pulumi.Output<string[]>;
    /**
     * Elastic load balancers in use by this Environment.
     */
    public /*out*/ readonly loadBalancers!: pulumi.Output<string[]>;
    /**
     * A unique name for this Environment. This name is used
     * in the application URL
     */
    public readonly name!: pulumi.Output<string>;
    /**
     * The [ARN](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html) of the Elastic Beanstalk [Platform](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn)
     * to use in deployment
     */
    public readonly platformArn!: pulumi.Output<string>;
    /**
     * The time between polling the AWS API to
     * check if changes have been applied. Use this to adjust the rate of API calls
     * for any `create` or `update` action. Minimum `10s`, maximum `180s`. Omit this to
     * use the default behavior, which is an exponential backoff
     */
    public readonly pollInterval!: pulumi.Output<string | undefined>;
    /**
     * SQS queues in use by this Environment.
     */
    public /*out*/ readonly queues!: pulumi.Output<string[]>;
    /**
     * Option settings to configure the new Environment. These
     * override specific values that are set as defaults. The format is detailed
     * below in Option Settings
     */
    public readonly settings!: pulumi.Output<outputs.elasticbeanstalk.EnvironmentSetting[] | undefined>;
    /**
     * A solution stack to base your environment
     * off of. Example stacks can be found in the [Amazon API documentation](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html)
     */
    public readonly solutionStackName!: pulumi.Output<string>;
    public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
    public /*out*/ readonly tagsAll!: pulumi.Output<{[key: string]: string}>;
    /**
     * The name of the Elastic Beanstalk Configuration
     * template to use in deployment
     */
    public readonly templateName!: pulumi.Output<string | undefined>;
    /**
     * Elastic Beanstalk Environment tier. Valid values are `Worker`
     * or `WebServer`. If tier is left blank `WebServer` will be used.
     */
    public readonly tier!: pulumi.Output<string | undefined>;
    /**
     * Autoscaling triggers in use by this Environment.
     */
    public /*out*/ readonly triggers!: pulumi.Output<string[]>;
    /**
     * The name of the Elastic Beanstalk Application Version
     * to use in deployment.
     */
    public readonly version!: pulumi.Output<ApplicationVersion>;
    /**
     * The maximum
     * [duration](https://golang.org/pkg/time/#ParseDuration) that this provider should
     * wait for an Elastic Beanstalk Environment to be in a ready state before timing
     * out.
     */
    public readonly waitForReadyTimeout!: pulumi.Output<string | undefined>;

    /**
     * Create a Environment resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: EnvironmentArgs, opts?: pulumi.CustomResourceOptions)
    constructor(name: string, argsOrState?: EnvironmentArgs | EnvironmentState, opts?: pulumi.CustomResourceOptions) {
        let inputs: pulumi.Inputs = {};
        opts = opts ?? {};
        if (opts.id) {
            // Lookup of an existing resource: copy every known property from
            // the provided state bag (undefined when absent).
            const state = argsOrState as EnvironmentState | undefined;
            inputs["allSettings"] = state?.allSettings;
            inputs["application"] = state?.application;
            inputs["arn"] = state?.arn;
            inputs["autoscalingGroups"] = state?.autoscalingGroups;
            inputs["cname"] = state?.cname;
            inputs["cnamePrefix"] = state?.cnamePrefix;
            inputs["description"] = state?.description;
            inputs["endpointUrl"] = state?.endpointUrl;
            inputs["instances"] = state?.instances;
            inputs["launchConfigurations"] = state?.launchConfigurations;
            inputs["loadBalancers"] = state?.loadBalancers;
            inputs["name"] = state?.name;
            inputs["platformArn"] = state?.platformArn;
            inputs["pollInterval"] = state?.pollInterval;
            inputs["queues"] = state?.queues;
            inputs["settings"] = state?.settings;
            inputs["solutionStackName"] = state?.solutionStackName;
            inputs["tags"] = state?.tags;
            inputs["tagsAll"] = state?.tagsAll;
            inputs["templateName"] = state?.templateName;
            inputs["tier"] = state?.tier;
            inputs["triggers"] = state?.triggers;
            inputs["version"] = state?.version;
            inputs["waitForReadyTimeout"] = state?.waitForReadyTimeout;
        } else {
            // Creation of a new resource: copy the user-supplied arguments and
            // explicitly mark the output-only properties as undefined.
            const args = argsOrState as EnvironmentArgs | undefined;
            if ((!args || args.application === undefined) && !opts.urn) {
                throw new Error("Missing required property 'application'");
            }
            inputs["application"] = args?.application;
            inputs["cnamePrefix"] = args?.cnamePrefix;
            inputs["description"] = args?.description;
            inputs["name"] = args?.name;
            inputs["platformArn"] = args?.platformArn;
            inputs["pollInterval"] = args?.pollInterval;
            inputs["settings"] = args?.settings;
            inputs["solutionStackName"] = args?.solutionStackName;
            inputs["tags"] = args?.tags;
            inputs["templateName"] = args?.templateName;
            inputs["tier"] = args?.tier;
            inputs["version"] = args?.version;
            inputs["waitForReadyTimeout"] = args?.waitForReadyTimeout;
            inputs["allSettings"] = undefined /*out*/;
            inputs["arn"] = undefined /*out*/;
            inputs["autoscalingGroups"] = undefined /*out*/;
            inputs["cname"] = undefined /*out*/;
            inputs["endpointUrl"] = undefined /*out*/;
            inputs["instances"] = undefined /*out*/;
            inputs["launchConfigurations"] = undefined /*out*/;
            inputs["loadBalancers"] = undefined /*out*/;
            inputs["queues"] = undefined /*out*/;
            inputs["tagsAll"] = undefined /*out*/;
            inputs["triggers"] = undefined /*out*/;
        }
        if (!opts.version) {
            opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
        }
        super(Environment.__pulumiType, name, inputs, opts);
    }
}
/**
 * Input properties used for looking up and filtering Environment resources.
 */
export interface EnvironmentState {
    /**
     * List of all option settings configured in this Environment. These
     * are a combination of default settings and their overrides from `setting` in
     * the configuration.
     */
    allSettings?: pulumi.Input<pulumi.Input<inputs.elasticbeanstalk.EnvironmentAllSetting>[]>;
    /**
     * Name of the application that contains the version
     * to be deployed
     */
    application?: pulumi.Input<string | Application>;
    /**
     * Amazon Resource Name (ARN) of the Environment.
     */
    arn?: pulumi.Input<string>;
    /**
     * The autoscaling groups used by this Environment.
     */
    autoscalingGroups?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * Fully qualified DNS name for this Environment.
     */
    cname?: pulumi.Input<string>;
    /**
     * Prefix to use for the fully qualified DNS name of
     * the Environment.
     */
    cnamePrefix?: pulumi.Input<string>;
    /**
     * Short description of the Environment
     */
    description?: pulumi.Input<string>;
    /**
     * The URL to the Load Balancer for this Environment
     */
    endpointUrl?: pulumi.Input<string>;
    /**
     * Instances used by this Environment.
     */
    instances?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * Launch configurations in use by this Environment.
     */
    launchConfigurations?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * Elastic load balancers in use by this Environment.
     */
    loadBalancers?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * A unique name for this Environment. This name is used
     * in the application URL
     */
    name?: pulumi.Input<string>;
    /**
     * The [ARN](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html) of the Elastic Beanstalk [Platform](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn)
     * to use in deployment
     */
    platformArn?: pulumi.Input<string>;
    /**
     * The time between polling the AWS API to
     * check if changes have been applied. Use this to adjust the rate of API calls
     * for any `create` or `update` action. Minimum `10s`, maximum `180s`. Omit this to
     * use the default behavior, which is an exponential backoff
     */
    pollInterval?: pulumi.Input<string>;
    /**
     * SQS queues in use by this Environment.
     */
    queues?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * Option settings to configure the new Environment. These
     * override specific values that are set as defaults. The format is detailed
     * below in Option Settings
     */
    settings?: pulumi.Input<pulumi.Input<inputs.elasticbeanstalk.EnvironmentSetting>[]>;
    /**
     * A solution stack to base your environment
     * off of. Example stacks can be found in the [Amazon API documentation](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html)
     */
    solutionStackName?: pulumi.Input<string>;
    /**
     * Key-value map of tags for the Environment.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * Map of all tags on the Environment — presumably `tags` merged with
     * provider-level default tags; verify against the AWS provider docs.
     */
    tagsAll?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The name of the Elastic Beanstalk Configuration
     * template to use in deployment
     */
    templateName?: pulumi.Input<string>;
    /**
     * Elastic Beanstalk Environment tier. Valid values are `Worker`
     * or `WebServer`. If tier is left blank `WebServer` will be used.
     */
    tier?: pulumi.Input<string>;
    /**
     * Autoscaling triggers in use by this Environment.
     */
    triggers?: pulumi.Input<pulumi.Input<string>[]>;
    /**
     * The name of the Elastic Beanstalk Application Version
     * to use in deployment.
     */
    version?: pulumi.Input<ApplicationVersion>;
    /**
     * The maximum
     * [duration](https://golang.org/pkg/time/#ParseDuration) that this provider should
     * wait for an Elastic Beanstalk Environment to be in a ready state before timing
     * out.
     */
    waitForReadyTimeout?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing an Environment resource.
 */
export interface EnvironmentArgs {
    /**
     * Name of the application that contains the version
     * to be deployed
     */
    application: pulumi.Input<string | Application>;
    /**
     * Prefix to use for the fully qualified DNS name of
     * the Environment.
     */
    cnamePrefix?: pulumi.Input<string>;
    /**
     * Short description of the Environment
     */
    description?: pulumi.Input<string>;
    /**
     * A unique name for this Environment. This name is used
     * in the application URL
     */
    name?: pulumi.Input<string>;
    /**
     * The [ARN](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html) of the Elastic Beanstalk [Platform](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-beanstalk-environment.html#cfn-beanstalk-environment-platformarn)
     * to use in deployment
     */
    platformArn?: pulumi.Input<string>;
    /**
     * The time between polling the AWS API to
     * check if changes have been applied. Use this to adjust the rate of API calls
     * for any `create` or `update` action. Minimum `10s`, maximum `180s`. Omit this to
     * use the default behavior, which is an exponential backoff
     */
    pollInterval?: pulumi.Input<string>;
    /**
     * Option settings to configure the new Environment. These
     * override specific values that are set as defaults. The format is detailed
     * below in Option Settings
     */
    settings?: pulumi.Input<pulumi.Input<inputs.elasticbeanstalk.EnvironmentSetting>[]>;
    /**
     * A solution stack to base your environment
     * off of. Example stacks can be found in the [Amazon API documentation](https://docs.aws.amazon.com/elasticbeanstalk/latest/dg/concepts.platforms.html)
     */
    solutionStackName?: pulumi.Input<string>;
    /**
     * Key-value map of tags to assign to the Environment.
     */
    tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
    /**
     * The name of the Elastic Beanstalk Configuration
     * template to use in deployment
     */
    templateName?: pulumi.Input<string>;
    /**
     * Elastic Beanstalk Environment tier. Valid values are `Worker`
     * or `WebServer`. If tier is left blank `WebServer` will be used.
     */
    tier?: pulumi.Input<string>;
    /**
     * The name of the Elastic Beanstalk Application Version
     * to use in deployment.
     */
    version?: pulumi.Input<ApplicationVersion>;
    /**
     * The maximum
     * [duration](https://golang.org/pkg/time/#ParseDuration) that this provider should
     * wait for an Elastic Beanstalk Environment to be in a ready state before timing
     * out.
     */
    waitForReadyTimeout?: pulumi.Input<string>;
}
* @title: 3D Physics collision meshes
* @description:
* This sample shows how to create rigid body physics nodes with each of the collision mesh types (boxes, spheres,
* cones, cylinders, capsules and convex hulls) and a static triangle mesh.
*
* Rigid body cubes with an initial velocity can be fired into the scene from first person perspective by pressing
* space. Click on the rendering window to move and rotate the camera around.
*/
/*{{ javascript("jslib/aabbtree.js") }}*/
/*{{ javascript("jslib/camera.js") }}*/
/*{{ javascript("jslib/floor.js") }}*/
/*{{ javascript("jslib/geometry.js") }}*/
/*{{ javascript("jslib/material.js") }}*/
/*{{ javascript("jslib/light.js") }}*/
/*{{ javascript("jslib/scenenode.js") }}*/
/*{{ javascript("jslib/scene.js") }}*/
/*{{ javascript("jslib/vmath.js") }}*/
/*{{ javascript("jslib/effectmanager.js") }}*/
/*{{ javascript("jslib/shadermanager.js") }}*/
/*{{ javascript("jslib/texturemanager.js") }}*/
/*{{ javascript("jslib/renderingcommon.js") }}*/
/*{{ javascript("jslib/defaultrendering.js") }}*/
/*{{ javascript("jslib/resourceloader.js") }}*/
/*{{ javascript("jslib/scenedebugging.js") }}*/
/*{{ javascript("jslib/observer.js") }}*/
/*{{ javascript("jslib/physicsmanager.js") }}*/
/*{{ javascript("jslib/utilities.js") }}*/
/*{{ javascript("jslib/vertexbuffermanager.js") }}*/
/*{{ javascript("jslib/indexbuffermanager.js") }}*/
/*{{ javascript("jslib/mouseforces.js") }}*/
/*{{ javascript("jslib/utilities.js") }}*/
/*{{ javascript("jslib/requesthandler.js") }}*/
/*{{ javascript("jslib/services/turbulenzservices.js") }}*/
/*{{ javascript("jslib/services/turbulenzbridge.js") }}*/
/*{{ javascript("jslib/services/gamesession.js") }}*/
/*{{ javascript("jslib/services/mappingtable.js") }}*/
/*{{ javascript("scripts/htmlcontrols.js") }}*/
/*{{ javascript("scripts/sceneloader.js") }}*/
/*global TurbulenzEngine: true */
/*global DefaultRendering: false */
/*global RequestHandler: false */
/*global SceneLoader: false */
/*global SceneNode: false */
/*global TurbulenzServices: false */
/*global TextureManager: false */
/*global ShaderManager: false */
/*global EffectManager: false */
/*global Scene: false */
/*global Camera: false */
/*global CameraController: false */
/*global Floor: false */
/*global MouseForces: false */
/*global PhysicsManager: false */
/*global HTMLControls: false */
TurbulenzEngine.onload = function onloadFn()
{
    // Entry point for the sample: creates the engine devices and managers,
    // wires up input, kicks off asset loading and starts the render loop.
    // Fatal errors and warnings are surfaced via a blocking alert.
    var errorCallback = function errorCallback(msg)
    {
        window.alert(msg);
    };
    TurbulenzEngine.onerror = errorCallback;
    var warningCallback = function warningCallback(msg)
    {
        window.alert(msg);
    };
    TurbulenzEngine.onwarning = warningCallback;
    // Core engine devices, all created with default parameters.
    var mathDeviceParameters = { };
    var mathDevice = TurbulenzEngine.createMathDevice(mathDeviceParameters);
    var graphicsDeviceParameters = { };
    var graphicsDevice = TurbulenzEngine.createGraphicsDevice(graphicsDeviceParameters);
    var physicsDeviceParameters = { };
    var physicsDevice = TurbulenzEngine.createPhysicsDevice(physicsDeviceParameters);
    var dynamicsWorldParameters = { };
    var dynamicsWorld = physicsDevice.createDynamicsWorld(dynamicsWorldParameters);
    var inputDeviceParameters = { };
    var inputDevice = TurbulenzEngine.createInputDevice(inputDeviceParameters);
    var requestHandlerParameters = {};
    var requestHandler = RequestHandler.create(requestHandlerParameters);
    var textureManager = TextureManager.create(graphicsDevice, requestHandler, null, errorCallback);
    var shaderManager = ShaderManager.create(graphicsDevice, requestHandler, null, errorCallback);
    var effectManager = EffectManager.create();
    var physicsManager = PhysicsManager.create(mathDevice, physicsDevice, dynamicsWorld);
    var mappingTable;
    // When true, physics debug geometry and contact points are drawn.
    var debugMode = true;
    // Renderer and assets for the scene.
    var renderer;
    var scene = Scene.create(mathDevice);
    var sceneLoader = SceneLoader.create();
    var duckMesh;
    // Setup world space
    var clearColor = mathDevice.v4Build(0.95, 0.95, 1.0, 1.0);
    var loadingClearColor = mathDevice.v4Build(0.8, 0.8, 0.8, 1.0);
    var worldUp = mathDevice.v3BuildYAxis();
    // Setup a camera to view a close-up object
    var camera = Camera.create(mathDevice);
    camera.nearPlane = 0.05;
    var cameraDefaultPos = mathDevice.v3Build(14.5, 8.0, 18.1);
    var cameraDefaultLook = mathDevice.v3Build(14.5, -(camera.farPlane / 2), -camera.farPlane);
    // The objects needed to draw the crosshair
    var technique2d;
    var shader2d;
    var techniqueParameters2d;
    var chSemantics = graphicsDevice.createSemantics(['POSITION']);
    var chFormats = [graphicsDevice.VERTEXFORMAT_FLOAT3];
    // The objects needed to draw the contact callbacks
    var contactsTechnique;
    var contactsShader;
    var contactsTechniqueParameters;
    var contactsSemantics = graphicsDevice.createSemantics(['POSITION']);
    var contactsFormats = [graphicsDevice.VERTEXFORMAT_FLOAT3];
    var contactWorldTransform = mathDevice.m43BuildIdentity();
    var contactWorldPoint = mathDevice.v3BuildZero();
    // Pooled contact line segments (6 floats each: world point, then
    // world point minus contact normal); numContacts is reset per frame.
    var contacts = [];
    var numContacts = 0;
    // Setup world floor
    var floor = Floor.create(graphicsDevice, mathDevice);
    var cameraController = CameraController.create(graphicsDevice, inputDevice, camera);
    // Mouse forces
    var dragMin = mathDevice.v3Build(-50, -50, -50);
    var dragMax = mathDevice.v3Build(50, 50, 50);
    var mouseForces = MouseForces.create(graphicsDevice, inputDevice, mathDevice,
                                         physicsDevice, dragMin, dragMax);
    mouseForces.clamp = 400;
    // Control codes
    var keyCodes = inputDevice.keyCodes;
    var mouseCodes = inputDevice.mouseCodes;
    // Setup the box firing objects, including the inertia
    var boxes = [];
    var numBoxes = 12;
    var fireCount = 0;
    var cubeExtents = mathDevice.v3Build(0.5, 0.5, 0.5);
    var boxShape = physicsDevice.createBoxShape({
        halfExtents : cubeExtents,
        margin : 0.001
    });
    var inertia = mathDevice.v3Copy(boxShape.inertia);
    inertia = mathDevice.v3ScalarMul(inertia, 1.0);
    function reset()
    {
        // Restores each duck body to its start slot and orientation, returns
        // any fired boxes to the pool, and resets the camera.
        var halfPI = Math.PI / 2;
        var halfExtents = duckMesh.localHalfExtents;
        var yhalfExtent = halfExtents[1];
        var j = 1;
        // Teleports node's rigid body to slot j (x = j * 5, resting on the
        // floor), optionally rotated, with zero velocity. The body is
        // removed and re-added so the world picks up the new transform;
        // j advances one slot per call.
        function resetTransform(node, rotationMatrix?)
        {
            var body = node.physicsNodes[0].body;
            dynamicsWorld.removeRigidBody(body);
            body.transform = mathDevice.m43BuildTranslation(j * 5, yhalfExtent, 0, body.transform);
            if (rotationMatrix)
            {
                body.transform = mathDevice.m43Mul(rotationMatrix, body.transform, body.transform);
            }
            body.linearVelocity = mathDevice.v3BuildZero();
            body.angularVelocity = mathDevice.v3BuildZero();
            body.active = true;
            dynamicsWorld.addRigidBody(body);
            j += 1;
        };
        // Reset ducks
        var rootNode = scene.findNode("DuckBoxPhys");
        resetTransform(rootNode);
        rootNode = scene.findNode("DuckConePhys");
        resetTransform(rootNode);
        rootNode = scene.findNode("DuckCylinderPhys");
        resetTransform(rootNode);
        rootNode = scene.findNode("DuckSpherePhys");
        resetTransform(rootNode);
        rootNode = scene.findNode("DuckCapsulePhys");
        // The capsule duck lies on its side (see the capsule setup in
        // postLoad), so restore the same rotation here.
        var rot = mathDevice.m43FromAxisRotation(mathDevice.v3BuildXAxis(), halfPI);
        mathDevice.m43SetAxisRotation(rot, mathDevice.v3BuildZAxis(), halfPI);
        resetTransform(rootNode, rot);
        rootNode = scene.findNode("DuckConvexHullPhys");
        resetTransform(rootNode);
        // Reset boxes
        // count = number of pooled boxes that have been fired so far,
        // capped at the pool size.
        var count = 0;
        if (fireCount > 0 && fireCount < numBoxes)
        {
            count = fireCount;
        }
        else if (fireCount >= numBoxes)
        {
            count = numBoxes;
        }
        for (var i = 0; i < count; i += 1)
        {
            var box = boxes[i];
            var node = box.target;
            physicsManager.deletePhysicsNode(box);
            physicsManager.enableNode(node, false);
        }
        fireCount = 0;
        // Reset camera
        camera.lookAt(cameraDefaultLook, worldUp, cameraDefaultPos);
        camera.updateViewMatrix();
    }
    function fireBox()
    {
        // Fires the next pooled box from the centre of the view along the
        // camera's look direction at 50 units/s. The pool of numBoxes
        // bodies is recycled round-robin via fireCount.
        // A centre-of-screen pick ray gives the launch position/direction.
        mouseForces.mouseX = 0.5;
        mouseForces.mouseY = 0.5;
        mouseForces.mouseZ = 0.0;
        mouseForces.generatePickRay(camera.matrix,
                                    1.0 / camera.recipViewWindowX,
                                    1.0 / camera.recipViewWindowY,
                                    camera.aspectRatio,
                                    camera.farPlane);
        var tr = mathDevice.m43BuildTranslation(mouseForces.pickRayFrom[0], mouseForces.pickRayFrom[1], mouseForces.pickRayFrom[2]);
        var linVel = mathDevice.v3Build(mouseForces.pickRayTo[0] - mouseForces.pickRayFrom[0],
                                        mouseForces.pickRayTo[1] - mouseForces.pickRayFrom[1],
                                        mouseForces.pickRayTo[2] - mouseForces.pickRayFrom[2]);
        mathDevice.v3Normalize(linVel, linVel);
        mathDevice.v3ScalarMul(linVel, 50.0, linVel);
        var box = boxes[fireCount % numBoxes];
        // Detach the node before teleporting the body, then re-register it
        // with the physics manager below.
        physicsManager.deletePhysicsNode(box);
        var node = box.target;
        var body = box.body;
        // Once the pool has wrapped, the node is already in the scene and
        // must be disabled before being re-enabled with the new transform.
        if (fireCount > numBoxes - 1)
        {
            physicsManager.enableNode(node, false);
        }
        body.transform = tr;
        body.angularVelocity = mathDevice.v3BuildZero();
        body.linearVelocity = linVel;
        body.active = true;
        physicsManager.physicsNodes.push(box);
        physicsManager.dynamicPhysicsNodes.push(box);
        physicsManager.enableNode(node, true);
        fireCount += 1;
    }
var onMouseDown = function (button)
{
if (mouseCodes.BUTTON_0 === button || mouseCodes.BUTTON_1 === button)
{
mouseForces.onmousedown();
}
};
var onMouseUp = function (button)
{
if (mouseCodes.BUTTON_0 === button || mouseCodes.BUTTON_1 === button)
{
mouseForces.onmouseup();
}
if (mouseCodes.BUTTON_2 === button)
{
mouseForces.onmouseup();
fireBox();
}
};
var onKeyUp = function physicsOnkeyupFn(keynum)
{
if (keynum === keyCodes.R) // 'r' key
{
reset();
}
if (keynum === keyCodes.SPACE) // Spacebar
{
fireBox();
}
else
{
cameraController.onkeyup(keynum);
}
};
    // Add event listeners
    inputDevice.addEventListener("keyup", onKeyUp);
    inputDevice.addEventListener("mousedown", onMouseDown);
    inputDevice.addEventListener("mouseup", onMouseUp);
    // Controls
    var htmlControls = HTMLControls.create();
    // Checkbox toggles physics debug rendering; the duck render mesh is
    // hidden while debug geometry is shown (avoids Z-fighting, see postLoad).
    htmlControls.addCheckboxControl({
        id: "checkbox01",
        value: "debugMode",
        isSelected: debugMode,
        fn: function ()
        {
            debugMode = !debugMode;
            duckMesh.setDisabled(debugMode);
            return debugMode;
        }
    });
    htmlControls.register();
function drawCrosshair()
{
if (!mouseForces.pickedBody)
{
graphicsDevice.setTechnique(technique2d);
var screenWidth = graphicsDevice.width;
var screenHeight = graphicsDevice.height;
techniqueParameters2d.clipSpace = mathDevice.v4Build(2.0 / screenWidth, -2.0 / screenHeight, -1.0, 1.0);
graphicsDevice.setTechniqueParameters(techniqueParameters2d);
var writer = graphicsDevice.beginDraw(
graphicsDevice.PRIMITIVE_LINES, 4, chFormats, chSemantics);
if (writer)
{
var halfWidth = screenWidth * 0.5;
var halfHeight = screenHeight * 0.5;
writer([halfWidth - 10, halfHeight]);
writer([halfWidth + 10, halfHeight]);
writer([halfWidth, halfHeight - 10]);
writer([halfWidth, halfHeight + 10]);
graphicsDevice.endDraw(writer);
}
}
}
//function addContact(objectA, objectB, pairContact)
//{
// if (debugMode)
// {
// objectB.calculateTransform(contactWorldTransform);
// mathDevice.m43TransformPoint(contactWorldTransform, pairContact.localPointOnB, contactWorldPoint);
// var contactNormal = pairContact.worldNormalOnB;
// if (numContacts >= contacts.length)
// {
// contacts[contacts.length] = new Float32Array(6);
// }
// var contact = contacts[numContacts];
// contact[0] = contactWorldPoint[0];
// contact[1] = contactWorldPoint[1];
// contact[2] = contactWorldPoint[2];
// contact[3] = contactWorldPoint[0] - contactNormal[0];
// contact[4] = contactWorldPoint[1] - contactNormal[1];
// contact[5] = contactWorldPoint[2] - contactNormal[2];
// numContacts += 1;
// }
//}
function addContacts(objectA, objectB, pairContacts)
{
if (debugMode)
{
var numPairContacts = pairContacts.length;
var n;
objectB.calculateTransform(contactWorldTransform);
for (n = 0; n < numPairContacts; n += 1)
{
var pairContact = pairContacts[n];
mathDevice.m43TransformPoint(contactWorldTransform, pairContact.localPointOnB, contactWorldPoint);
var contactNormal = pairContact.worldNormalOnB;
if (numContacts >= contacts.length)
{
contacts[contacts.length] = new Float32Array(6);
}
var contact = contacts[numContacts];
contact[0] = contactWorldPoint[0];
contact[1] = contactWorldPoint[1];
contact[2] = contactWorldPoint[2];
contact[3] = contactWorldPoint[0] - contactNormal[0];
contact[4] = contactWorldPoint[1] - contactNormal[1];
contact[5] = contactWorldPoint[2] - contactNormal[2];
numContacts += 1;
}
}
}
function drawContacts()
{
if (numContacts)
{
graphicsDevice.setTechnique(contactsTechnique);
contactsTechniqueParameters.worldViewProjection = camera.viewProjectionMatrix;
graphicsDevice.setTechniqueParameters(contactsTechniqueParameters);
var writer = graphicsDevice.beginDraw(
graphicsDevice.PRIMITIVE_LINES, numContacts * 2, contactsFormats, contactsSemantics);
if (writer)
{
var n;
for (n = 0; n < numContacts; n += 1)
{
var contact = contacts[n];
writer(contact[0], contact[1], contact[2]);
writer(contact[3], contact[4], contact[5]);
}
graphicsDevice.endDraw(writer);
}
}
}
    // Per-frame loop: input -> camera -> physics -> scene -> render.
    var renderFrame = function renderFrameFn()
    {
        var currentTime = TurbulenzEngine.time;
        // Update input and camera
        inputDevice.update();
        if (mouseForces.pickedBody)
        {
            // If we're dragging a body don't apply the movement to the camera
            cameraController.pitch = 0;
            cameraController.turn = 0;
            cameraController.step = 0;
        }
        cameraController.update();
        var deviceWidth = graphicsDevice.width;
        var deviceHeight = graphicsDevice.height;
        var aspectRatio = (deviceWidth / deviceHeight);
        if (aspectRatio !== camera.aspectRatio)
        {
            camera.aspectRatio = aspectRatio;
            camera.updateProjectionMatrix();
        }
        camera.updateViewProjectionMatrix();
        // Contact segments are re-recorded by the physics callbacks during
        // the dynamicsWorld update below.
        numContacts = 0;
        // Update the physics
        mouseForces.update(dynamicsWorld, camera, 0.1);
        dynamicsWorld.update();
        physicsManager.update();
        scene.update();
        renderer.update(graphicsDevice, camera, scene, currentTime);
        if (graphicsDevice.beginFrame())
        {
            if (renderer.updateBuffers(graphicsDevice, deviceWidth, deviceHeight))
            {
                renderer.draw(graphicsDevice, clearColor);
                floor.render(graphicsDevice, camera);
                if (debugMode)
                {
                    scene.drawPhysicsNodes(graphicsDevice, shaderManager, camera, physicsManager);
                    scene.drawPhysicsGeometry(graphicsDevice, shaderManager, camera, physicsManager);
                    drawContacts();
                }
            }
            drawCrosshair();
            graphicsDevice.endFrame();
        }
    };
    var intervalID;
    // Loading loop (10Hz): clears the screen until the scene has loaded,
    // then performs one-time setup (shaders, floor physics) and switches
    // the interval over to the 60Hz render loop.
    var loadingLoop = function loadingLoopFn()
    {
        if (graphicsDevice.beginFrame())
        {
            graphicsDevice.clear(loadingClearColor);
            graphicsDevice.endFrame();
        }
        if (sceneLoader.complete())
        {
            TurbulenzEngine.clearInterval(intervalID);
            camera.lookAt(cameraDefaultLook, worldUp, cameraDefaultPos);
            camera.updateViewMatrix();
            renderer.updateShader(shaderManager);
            // Crosshair technique: 2D constant colour (black).
            shader2d = shaderManager.get("shaders/generic2D.cgfx");
            technique2d = shader2d.getTechnique("constantColor2D");
            techniqueParameters2d = graphicsDevice.createTechniqueParameters({
                clipSpace : null,
                constantColor : mathDevice.v4Build(0, 0, 0, 1)
            });
            // Contact debug-line technique: constant colour (red).
            contactsShader = shaderManager.get("shaders/debug.cgfx");
            contactsTechnique = contactsShader.getTechnique("debug_lines_constant");
            contactsTechniqueParameters = graphicsDevice.createTechniqueParameters({
                worldViewProjection : null,
                constantColor : mathDevice.v4Build(1, 0, 0, 1)
            });
            // Floor physics
            // NOTE(review): 'length >= 0' is always true, so this branch
            // always runs — confirm whether '> 0' was intended.
            if (physicsManager.physicsNodes.length >= 0)
            {
                // Floor is represented by a plane shape
                var floorShape = physicsDevice.createPlaneShape({
                    normal : mathDevice.v3Build(0, 1, 0),
                    distance : 0,
                    margin : 0.001
                });
                var floorObject = physicsDevice.createCollisionObject({
                    shape : floorShape,
                    transform : mathDevice.m43BuildIdentity(),
                    friction : 0.8,
                    restitution : 0.1,
                    group: physicsDevice.FILTER_STATIC,
                    mask: physicsDevice.FILTER_ALL,
                    //onPreSolveContact : addContact,
                    //onAddedContacts : addContacts
                    onProcessedContacts : addContacts
                    //onRemovedContacts : addContacts
                });
                // Adds the floor collision object to the world
                dynamicsWorld.addCollisionObject(floorObject);
            }
            intervalID = TurbulenzEngine.setInterval(renderFrame, 1000 / 60);
        }
    };
    intervalID = TurbulenzEngine.setInterval(loadingLoop, 1000 / 10);
    // Change the clear color before we start loading assets
    loadingLoop();
    // Runs once the duck scene has loaded: builds one duck per collision
    // shape type (box, cone, cylinder, sphere, capsule, convex hull) lined
    // up along the X axis, plus the pool of fire-able boxes.
    var postLoad = function postLoadFn()
    {
        var mass = 10.0;
        var margin = 0.001;
        duckMesh = scene.findNode("DuckMesh");
        var halfExtents = duckMesh.localHalfExtents;
        var xhalfExtent = halfExtents[0];
        var yhalfExtent = halfExtents[1];
        var halfPI = Math.PI / 2;
        var xAxis = mathDevice.v3BuildXAxis();
        var zAxis = mathDevice.v3BuildZAxis();
        // Clones the duck render mesh and wraps it in a new dynamic scene
        // node with a rigid body of the given shape placed at 'pos'.
        // 'offsetTransform' positions the render geometry relative to the
        // physics body.
        function newPhysicsNode(name, shape, offsetTransform, pos)
        {
            var duckGeom = duckMesh.clone(name + "Geom");
            // Drop the cloned node's inherited physics; the new rigid body
            // below replaces it.
            physicsManager.deletePhysicsNode(duckGeom.physicsNodes[0]);
            duckGeom.physicsNodes = [];
            duckGeom.setLocalTransform(offsetTransform);
            var duckPhys = SceneNode.create({
                name: name + "Phys",
                local: pos,
                dynamic: true,
                disabled: false
            });
            var rigidBody = physicsDevice.createRigidBody({
                shape : shape,
                mass : mass,
                inertia : mathDevice.v3ScalarMul(shape.inertia, mass),
                transform : pos,
                friction : 0.7,
                restitution : 0.2,
                angularDamping : 0.4
            });
            var physicsNode = {
                body : rigidBody,
                target : duckPhys,
                dynamic : true
            };
            scene.addRootNode(duckPhys);
            duckPhys.addChild(duckGeom);
            duckPhys.physicsNodes = [physicsNode];
            duckPhys.setDynamic();
            physicsManager.physicsNodes.push(physicsNode);
            physicsManager.dynamicPhysicsNodes.push(physicsNode);
            physicsManager.enableHierarchy(duckPhys, true);
        }
        // Build a box duck
        var shape = physicsDevice.createBoxShape({
            halfExtents : halfExtents,
            margin : margin
        });
        var position = mathDevice.m43BuildTranslation(5, yhalfExtent, 0);
        newPhysicsNode("DuckBox", shape, mathDevice.m43BuildIdentity(), position);
        // Build a cone duck
        shape = physicsDevice.createConeShape({
            height : yhalfExtent * 2,
            radius : xhalfExtent,
            margin : margin
        });
        mathDevice.m43BuildTranslation(10, yhalfExtent, 0, position);
        newPhysicsNode("DuckCone", shape, mathDevice.m43BuildIdentity(), position);
        // Build a cylinder duck
        shape = physicsDevice.createCylinderShape({
            halfExtents : [xhalfExtent, yhalfExtent, xhalfExtent],
            margin : margin
        });
        mathDevice.m43BuildTranslation(15, yhalfExtent, 0, position);
        newPhysicsNode("DuckCylinder", shape, mathDevice.m43BuildIdentity(), position);
        // Build a sphere duck
        shape = physicsDevice.createSphereShape({
            radius : xhalfExtent,
            margin : margin
        });
        mathDevice.m43BuildTranslation(20, yhalfExtent, 0, position);
        newPhysicsNode("DuckSphere", shape, mathDevice.m43BuildIdentity(), position);
        // Build a capsule duck
        shape = physicsDevice.createCapsuleShape({
            radius : xhalfExtent,
            height : yhalfExtent * 2,
            margin : margin
        });
        // Capsules always take their height in the Y-axis
        // Rotate the capsule so it is flat against the floor
        // Rotate the duck so it is facing the correct direction
        mathDevice.m43BuildTranslation(25, yhalfExtent, 0, position);
        mathDevice.m43SetAxisRotation(position, xAxis, halfPI);
        mathDevice.m43SetAxisRotation(position, zAxis, halfPI);
        newPhysicsNode("DuckCapsule", shape, mathDevice.m43FromAxisRotation(zAxis, -halfPI), position);
        // Build a convex hull duck
        shape = physicsDevice.createConvexHullShape({
            points : duckMesh.physicsNodes[0].triangleArray.vertices,
            margin : margin,
            minExtent : mathDevice.v3Neg(halfExtents),
            maxExtent : halfExtents
        });
        mathDevice.m43BuildTranslation(30, yhalfExtent, 0, position);
        newPhysicsNode("DuckConvexHull", shape, mathDevice.m43BuildIdentity(), position);
        // Set DuckMesh to disabled when debug rendering is enabled
        // This is to prevent Z-fighting between the geometry of the triangle mesh and asset
        duckMesh.setDisabled(true);
        // Create a pool of boxes
        // Bodies are created up front and recycled by fireBox().
        var identity = mathDevice.m43BuildIdentity();
        for (var i = 0; i < numBoxes; i += 1)
        {
            var box = physicsDevice.createRigidBody({
                shape : boxShape,
                mass : 1.0,
                inertia : boxShape.inertia,
                transform : identity,
                friction : 0.9,
                restitution : 0.1
            });
            var newBox = SceneNode.create({
                name: "box" + i,
                local: identity,
                dynamic: true,
                disabled: false
            });
            var physicsNode = {
                body : box,
                target : newBox,
                dynamic : true
            };
            newBox.physicsNodes = [physicsNode];
            scene.addRootNode(newBox);
            boxes[i] = physicsNode;
        }
    };
    // Creates the renderer and starts loading the duck scene; postLoad
    // runs once the scene data has arrived.
    var loadAssets = function loadAssetsFn()
    {
        // Renderer for the scene.
        renderer = DefaultRendering.create(graphicsDevice,
                                           mathDevice,
                                           shaderManager,
                                           effectManager);
        renderer.setGlobalLightPosition(mathDevice.v3Build(0.5, 100.0, 0.5));
        renderer.setAmbientColor(mathDevice.v3Build(0.3, 0.3, 0.4));
        shaderManager.load("shaders/generic2D.cgfx");
        // Load mesh duck
        sceneLoader.load({
            scene : scene,
            assetPath : "models/duck_trianglemesh.dae",
            graphicsDevice : graphicsDevice,
            textureManager : textureManager,
            effectManager : effectManager,
            shaderManager : shaderManager,
            physicsManager : physicsManager,
            requestHandler : requestHandler,
            baseMatrix : mathDevice.m43BuildTranslation(0, 0.77, 0),
            append : true,
            postSceneLoadFn : postLoad,
            dynamic : false
        });
    };
    // Bootstrap chain: game session -> mapping table -> asset loading.
    var mappingTableReceived = function mappingTableReceivedFn(mappingTable)
    {
        // Remap asset requests through the mapping table before loading.
        textureManager.setPathRemapping(mappingTable.urlMapping, mappingTable.assetPrefix);
        shaderManager.setPathRemapping(mappingTable.urlMapping, mappingTable.assetPrefix);
        sceneLoader.setPathRemapping(mappingTable.urlMapping, mappingTable.assetPrefix);
        loadAssets();
    };
    var gameSessionCreated = function gameSessionCreatedFn(gameSession)
    {
        mappingTable = TurbulenzServices.createMappingTable(requestHandler,
                                                            gameSession,
                                                            mappingTableReceived);
    };
    var gameSession = TurbulenzServices.createGameSession(requestHandler, gameSessionCreated);
    // Create a scene destroy callback to run when the window is closed
    // Tears down engine objects and nulls every closure reference so the
    // engine can release them on unload.
    function destroyScene()
    {
        gameSession.destroy();
        TurbulenzEngine.clearInterval(intervalID);
        clearColor = null;
        if (scene)
        {
            scene.destroy();
            scene = null;
        }
        requestHandler = null;
        if (renderer)
        {
            renderer.destroy();
            renderer = null;
        }
        camera = null;
        if (textureManager)
        {
            textureManager.destroy();
            textureManager = null;
        }
        if (shaderManager)
        {
            shaderManager.destroy();
            shaderManager = null;
        }
        effectManager = null;
        TurbulenzEngine.flush();
        graphicsDevice = null;
        mathDevice = null;
        physicsDevice = null;
        physicsManager = null;
        dynamicsWorld = null;
        mouseCodes = null;
        keyCodes = null;
        inputDevice = null;
        cameraController = null;
        floor = null;
    }
    TurbulenzEngine.onunload = destroyScene;
};
import { Inject, Injectable, Optional } from '@angular/core';
import { BehaviorSubject } from 'rxjs';
// firebase
import firebase from "firebase/app";
import 'firebase/messaging';
import 'firebase/database';
import 'firebase/firestore';
// models
import { UserModel } from '../../models/user';
import { MessageModel } from '../../models/message';
// services
import { ConversationHandlerService } from '../abstract/conversation-handler.service';
import { LoggerService } from '../abstract/logger.service';
import { LoggerInstance } from '../logger/loggerInstance';
// utils
import { MSG_STATUS_RECEIVED, CHAT_REOPENED, CHAT_CLOSED, MEMBER_JOINED_GROUP, TYPE_DIRECT, MESSAGE_TYPE_INFO } from '../../utils/constants';
import { compareValues,searchIndexInArrayForUid,conversationMessagesRef } from '../../utils/utils';
import { messageType } from 'src/chat21-core/utils/utils-message';
// @Injectable({ providedIn: 'root' })
@Injectable()
export class FirebaseConversationHandler extends ConversationHandlerService {
    // BehaviorSubject
    // Streams consumed by the UI: emit on firebase child_added /
    // child_changed / child_removed events respectively.
    messageAdded: BehaviorSubject<MessageModel>;
    messageChanged: BehaviorSubject<MessageModel>;
    messageRemoved: BehaviorSubject<string>;
    isTyping: BehaviorSubject<any> = new BehaviorSubject<any>(null);
    // public variables
    public attributes: any;
    // Local, timestamp-sorted mirror of the conversation's messages node.
    public messages: MessageModel[];
    public conversationWith: string;
    // private variables
    private translationMap: Map<string, string>; // LABEL_TODAY, LABEL_TOMORROW
    // Firebase path of this conversation's messages node (set in connect()).
    private urlNodeFirebase: string;
    private recipientId: string;
    private recipientFullname: string;
    private tenant: string;
    private loggedUser: UserModel;
    private senderId: string;
    private listSubsriptions: any[]; // NOTE(review): name typo ('Subsriptions') kept
    private CLIENT_BROWSER: string; // navigator.userAgent, captured in initialize()
    private lastDate = '';
    private logger:LoggerService = LoggerInstance.getInstance()
    // Live query over the last 100 messages ordered by timestamp.
    private ref: firebase.database.Query;
    /**
     * @param skipMessage when true, messages of type 'info' are dropped in
     * added()/changed() instead of being stored and emitted.
     */
    constructor(@Inject('skipMessage') private skipMessage: boolean) {
        super();
    }
    /**
     * Initializes the conversation handler with the recipient, the logged
     * user, the tenant and the translation map, and clears the local
     * messages array. Firebase listeners are attached by connect().
     */
    initialize(recipientId: string,recipientFullName: string,loggedUser: UserModel,tenant: string,translationMap: Map<string, string>) {
        this.logger.info('[FIREBASEConversationHandlerSERVICE] initWithRecipient',recipientId, recipientFullName, loggedUser, tenant, translationMap)
        this.recipientId = recipientId;
        this.recipientFullname = recipientFullName;
        this.loggedUser = loggedUser;
        if (loggedUser) {
            this.senderId = loggedUser.uid;
        }
        this.tenant = tenant;
        this.translationMap = translationMap;
        this.listSubsriptions = [];
        this.CLIENT_BROWSER = navigator.userAgent;
        this.conversationWith = recipientId;
        this.messages = [];
        // this.attributes = this.setAttributes();
    }
    /**
     * Connects to the conversation's messages node:
     * builds the node url, queries the last 100 messages ordered by
     * timestamp, keeps the query reference and subscribes to the
     * child_added / child_changed / child_removed events.
     */
    connect() {
        this.lastDate = '';
        const that = this;
        this.urlNodeFirebase = conversationMessagesRef(this.tenant, this.loggedUser.uid);
        this.urlNodeFirebase = this.urlNodeFirebase + this.conversationWith;
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] urlNodeFirebase *****', this.urlNodeFirebase);
        const firebaseMessages = firebase.database().ref(this.urlNodeFirebase);
        this.ref = firebaseMessages.orderByChild('timestamp').limitToLast(100);
        this.ref.on('child_added', (childSnapshot) => {
            that.logger.debug('[FIREBASEConversationHandlerSERVICE] >>>>>>>>>>>>>> child_added: ', childSnapshot.val())
            that.added(childSnapshot);
        });
        this.ref.on('child_changed', (childSnapshot) => {
            that.logger.debug('[FIREBASEConversationHandlerSERVICE] >>>>>>>>>>>>>> child_changed: ', childSnapshot.val())
            that.changed(childSnapshot);
        });
        this.ref.on('child_removed', (childSnapshot) => {
            that.removed(childSnapshot);
        });
    }
    /**
     * Sends a message to the conversation: reads the app language from the
     * document, builds a local MessageModel and pushes the message payload
     * onto the conversation's firebase messages node.
     * Returns the locally-built MessageModel.
     * NOTE(review): the returned model keeps uid '' — the key generated by
     * push() is never copied back; confirm callers rely on the echo via
     * child_added instead.
     * @param msg message text
     * @param typeMsg message type
     * @param metadataMsg message metadata
     * @param conversationWith recipient id
     * @param conversationWithFullname recipient full name
     * @param sender sender id
     * @param senderFullname sender full name
     * @param channelType falls back to TYPE_DIRECT when missing
     * @param attributes extra attributes attached to the message
     */
    sendMessage(
        msg: string,
        typeMsg: string,
        metadataMsg: string,
        conversationWith: string,
        conversationWithFullname: string,
        sender: string,
        senderFullname: string,
        channelType: string,
        attributes: any
    ) {
        const that = this;
        if (!channelType || channelType === 'undefined') {
            channelType = TYPE_DIRECT;
        }
        const firebaseMessagesCustomUid = firebase.database().ref(this.urlNodeFirebase);
        // const key = messageRef.key;
        const lang = document.documentElement.lang;
        const recipientFullname = conversationWithFullname;
        // Server-timestamp placeholder object, not a resolved time; it is
        // resolved by firebase when the payload is written.
        const timestamp = firebase.database.ServerValue.TIMESTAMP
        const message = new MessageModel(
            '',
            lang,
            conversationWith,
            recipientFullname,
            sender,
            senderFullname,
            0,
            metadataMsg,
            msg,
            timestamp,
            //dateSendingMessage,
            typeMsg,
            attributes,
            channelType,
            false
        );
        const messageRef = firebaseMessagesCustomUid.push({
            language: lang,
            recipient: conversationWith,
            recipient_fullname: recipientFullname,
            sender: sender,
            sender_fullname: senderFullname,
            status: 0,
            metadata: metadataMsg,
            text: msg,
            timestamp: firebase.database.ServerValue.TIMESTAMP,
            type: typeMsg,
            attributes: attributes,
            channel_type: channelType
            // isSender: true
        });
        // const message = new MessageModel(
        //     key,
        //     language, // language
        //     conversationWith, // recipient
        //     recipientFullname, // recipient_full_name
        //     sender, // sender
        //     senderFullname, // sender_full_name
        //     0, // status
        //     metadata, // metadata
        //     msg, // text
        //     0, // timestamp
        //     type, // type
        //     this.attributes, // attributes
        //     channelType, // channel_type
        //     true // is_sender
        // );
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] sendMessage --> messages: ', this.messages);
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] sendMessage --> senderFullname: ', senderFullname);
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] sendMessage --> sender: ', sender);
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] sendMessage --> SEND MESSAGE: ', msg, channelType);
        return message
    }
    /**
     * Disposes the conversation's firebase reference.
     * NOTE(review): 'this.ref.off()' is commented out, so the listeners
     * attached in connect() are never detached — confirm whether this is
     * intentional; as written, dispose() is a no-op and leaks listeners.
     */
    dispose() {
        // this.ref.off();
    }
// ---------------------------------------------------------- //
// BEGIN PRIVATE FUNCTIONS
// ---------------------------------------------------------- //
/** */
// private setAttributes(): any {
// const attributes: any = {
// client: this.CLIENT_BROWSER,
// sourcePage: location.href,
// };
// if(this.loggedUser && this.loggedUser.email ){
// attributes.userEmail = this.loggedUser.email
// }
// if(this.loggedUser && this.loggedUser.fullname) {
// attributes.userFullname = this.loggedUser.fullname
// }
// // let attributes: any = JSON.parse(sessionStorage.getItem('attributes'));
// // if (!attributes || attributes === 'undefined') {
// // attributes = {
// // client: this.CLIENT_BROWSER,
// // sourcePage: location.href,
// // userEmail: this.loggedUser.email,
// // userFullname: this.loggedUser.fullname
// // };
// // this.logger.printLog('>>>>>>>>>>>>>> setAttributes: ', JSON.stringify(attributes));
// // sessionStorage.setItem('attributes', JSON.stringify(attributes));
// // }
// return attributes;
// }
/** */
private added(childSnapshot: any) {
const msg = this.messageGenerate(childSnapshot);
// msg.attributes && msg.attributes['subtype'] === 'info'
if(this.skipMessage && messageType(MESSAGE_TYPE_INFO, msg)){
return;
}
this.addRepalceMessageInArray(childSnapshot.key, msg);
this.messageAdded.next(msg);
}
/** */
private changed(childSnapshot: any) {
const msg = this.messageGenerate(childSnapshot);
// imposto il giorno del messaggio per visualizzare o nascondere l'header data
// msg.attributes && msg.attributes['subtype'] === 'info'
if(this.skipMessage && messageType(MESSAGE_TYPE_INFO, msg) ){
return;
}
this.addRepalceMessageInArray(childSnapshot.key, msg);
this.messageChanged.next(msg);
}
/** */
private removed(childSnapshot: any) {
const index = searchIndexInArrayForUid(this.messages, childSnapshot.key);
// controllo superfluo sarà sempre maggiore
if (index > -1) {
this.messages.splice(index, 1);
this.messageRemoved.next(childSnapshot.key);
}
}
    /**
     * Builds a MessageModel from a firebase snapshot: fills uid from the
     * snapshot key, normalizes sender_fullname, sets isSender and
     * translates server 'info' messages.
     */
    private messageGenerate(childSnapshot: any) {
        const msg: MessageModel = childSnapshot.val();
        msg.uid = childSnapshot.key;
        // check done for groups — to be refactored
        if (!msg.sender_fullname || msg.sender_fullname === 'undefined') {
            msg.sender_fullname = msg.sender;
        }
        // sanitize urls in the message text
        // if (msg.type === 'text') {
        //     msg.text = htmlEntities(msg.text)
        //     msg.text = replaceEndOfLine(msg.text)
        // }
        // check whether the sender is the logged user
        msg.isSender = this.isSender(msg.sender, this.loggedUser.uid);
        // translate messages coming from the server
        if (msg.attributes && msg.attributes.subtype) {
            if (msg.attributes.subtype === 'info' || msg.attributes.subtype === 'info/support') {
                this.translateInfoSupportMessages(msg);
            }
        }
        /// commented because NOW ATTRIBUTES COMES FROM OUTSIDE
        // if (msg.attributes && msg.attributes.projectId) {
        //     this.attributes.projectId = msg.attributes.projectId;
        //     // sessionStorage.setItem('attributes', JSON.stringify(attributes));
        // }
        return msg;
    }
    /**
     * Inserts or replaces a message (matched by uid/key) in the local
     * array, keeps the array sorted by ascending timestamp and marks the
     * message as delivered via setStatusMessage().
     * NOTE(review): method name typo ('Repalce') kept — it is called from
     * added()/changed().
     */
    private addRepalceMessageInArray(key: string, msg: MessageModel) {
        const index = searchIndexInArrayForUid(this.messages, key);
        if (index > -1) {
            this.messages.splice(index, 1, msg);
        } else {
            this.messages.splice(0, 0, msg);
        }
        this.messages.sort(compareValues('timestamp', 'asc'));
        // update the message status: this marks the message as delivered
        // to the client, NOT as read
        this.setStatusMessage(msg, this.conversationWith);
    }
    /**
     * Replaces the text of server 'info' messages with localized strings
     * from translationMap: MEMBER_JOINED_GROUP is rendered as
     * "subject verb complement" (wording depends on whether the added
     * member is the logged user and whether a fullname is available);
     * CHAT_REOPENED and CHAT_CLOSED map to fixed labels.
     */
    private translateInfoSupportMessages(message: MessageModel) {
        // check if the message attributes has parameters and it is of the "MEMBER_JOINED_GROUP" type
        const INFO_SUPPORT_USER_ADDED_SUBJECT = this.translationMap.get('INFO_SUPPORT_USER_ADDED_SUBJECT');
        const INFO_SUPPORT_USER_ADDED_YOU_VERB = this.translationMap.get('INFO_SUPPORT_USER_ADDED_YOU_VERB');
        const INFO_SUPPORT_USER_ADDED_COMPLEMENT = this.translationMap.get('INFO_SUPPORT_USER_ADDED_COMPLEMENT');
        const INFO_SUPPORT_USER_ADDED_VERB = this.translationMap.get('INFO_SUPPORT_USER_ADDED_VERB');
        const INFO_SUPPORT_CHAT_REOPENED = this.translationMap.get('INFO_SUPPORT_CHAT_REOPENED');
        const INFO_SUPPORT_CHAT_CLOSED = this.translationMap.get('INFO_SUPPORT_CHAT_CLOSED');
        if (message.attributes.messagelabel
            && message.attributes.messagelabel.parameters
            && message.attributes.messagelabel.key === MEMBER_JOINED_GROUP
        ) {
            let subject: string;
            let verb: string;
            let complement: string;
            if (message.attributes.messagelabel.parameters.member_id === this.loggedUser.uid) {
                // the logged user was added to the group
                subject = INFO_SUPPORT_USER_ADDED_SUBJECT;
                verb = INFO_SUPPORT_USER_ADDED_YOU_VERB;
                complement = INFO_SUPPORT_USER_ADDED_COMPLEMENT;
            } else {
                if (message.attributes.messagelabel.parameters.fullname) {
                    // other user has been added to the group (and he has a fullname)
                    subject = message.attributes.messagelabel.parameters.fullname;
                    verb = INFO_SUPPORT_USER_ADDED_VERB;
                    complement = INFO_SUPPORT_USER_ADDED_COMPLEMENT;
                } else {
                    // other user has been added to the group (and he has not a fullname, so use hes useruid)
                    subject = message.attributes.messagelabel.parameters.member_id;
                    verb = INFO_SUPPORT_USER_ADDED_VERB;
                    complement = INFO_SUPPORT_USER_ADDED_COMPLEMENT;
                }
            }
            message.text = subject + ' ' + verb + ' ' + complement;
        } else if ((message.attributes.messagelabel && message.attributes.messagelabel.key === CHAT_REOPENED)) {
            message.text = INFO_SUPPORT_CHAT_REOPENED;
        } else if ((message.attributes.messagelabel && message.attributes.messagelabel.key === CHAT_CLOSED)) {
            message.text = INFO_SUPPORT_CHAT_CLOSED;
        }
    }
/**
 * Marks an incoming message as delivered (MSG_STATUS_RECEIVED).
 * This status means the message reached this client — NOT that it was read.
 * The Firebase update is written only when the message was sent by someone
 * other than the logged user and its status is still below MSG_STATUS_RECEIVED.
 * @param msg - the message whose delivery status may be updated
 * @param conversationWith - conversation counterpart uid (kept for signature
 *        compatibility with callers; not read here)
 */
private setStatusMessage(msg: MessageModel, conversationWith: string) {
    // Fix: the original checked `msg.status < MSG_STATUS_RECEIVED` twice
    // (outer and inner if) — a single combined condition is equivalent.
    if (msg.sender !== this.loggedUser.uid && msg.status < MSG_STATUS_RECEIVED) {
        const urlNodeMessagesUpdate = this.urlNodeFirebase + '/' + msg.uid;
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] update message status', urlNodeMessagesUpdate);
        firebase.database().ref(urlNodeMessagesUpdate).update({ status: MSG_STATUS_RECEIVED });
    }
}
/**
 * True when `sender` is the logged-in user.
 * Called from the conversation message-list page.
 * A falsy `currentUserId` (e.g. not logged in) never matches any sender.
 * @param sender - uid of the message sender
 * @param currentUserId - uid of the logged user
 */
private isSender(sender: string, currentUserId: string) {
    // Same truth table as the original nested if/else, as one expression.
    return !!currentUserId && sender === currentUserId;
}
/** */
// updateMetadataMessage(uid: string, metadata: any) {
//   metadata.status = true;
//   const message = {
//     metadata: metadata
//   };
//   const firebaseMessages = firebase.database().ref(this.urlNodeFirebase + uid);
//   firebaseMessages.set(message);
// }
/**
 * Detaches the stored subscription whose uid matches `key`.
 * Fix: the original used `return` inside `forEach`, which only skips to the
 * next element and does NOT stop the iteration — the intended early exit
 * never happened. A plain `for..of` with `break` makes it real.
 */
unsubscribe(key: string) {
    for (const sub of this.listSubsriptions) {
        this.logger.debug('[FIREBASEConversationHandlerSERVICE] unsubscribe: ', sub.uid, key);
        if (sub.uid === key) {
            this.logger.debug('[FIREBASEConversationHandlerSERVICE] unsubscribe: ', sub.uid, key);
            sub.unsubscribe(key, null);
            break;
        }
    }
}
} | the_stack |
import { Signal } from './base/Signal';
import { parseUri } from './base/parseUri';
import { IResourceMetadata, LoaderResource } from './LoaderResource';
import { AsyncQueue } from './base/AsyncQueue';
import { Dict } from '@pixi/utils';
// some constants
// Progress is reported on a 0–100 scale.
const MAX_PROGRESS = 100;
// Captures a trailing "#fragment" (possibly empty) so the default
// querystring can be inserted before the hash in _prepareUrl().
const rgxExtractUrlHash = /(#[\w-]+)?$/;
/** A loader middleware: receives the resource and a `next` continuation to call when done. */
export type ILoaderMiddleware = (resource: LoaderResource, next: (...args: any[]) => void) => void;
/**
 * Call signatures accepted by {@link Loader#add}: `(name, url)`, `(url)`,
 * a single options object, or an array of options objects / url strings —
 * each form optionally followed by an options object and/or a callback.
 */
export interface ILoaderAdd {
    (this: Loader, name: string, url: string, callback?: LoaderResource.OnCompleteSignal): Loader;
    (this: Loader, name: string, url: string, options?: IAddOptions, callback?: LoaderResource.OnCompleteSignal): Loader;
    (this: Loader, url: string, callback?: LoaderResource.OnCompleteSignal): Loader;
    (this: Loader, url: string, options?: IAddOptions, callback?: LoaderResource.OnCompleteSignal): Loader;
    (this: Loader, options: IAddOptions, callback?: LoaderResource.OnCompleteSignal): Loader;
    (this: Loader, resources: (IAddOptions | string)[], callback?: LoaderResource.OnCompleteSignal): Loader;
}
/**
 * Options for a call to `.add()`.
 *
 * @see Loader#add
 *
 * @property name - The name of the resource to load, if not passed the url is used.
 * @property key - Alias for `name`.
 * @property url - The url for this resource, relative to the baseUrl of this loader.
 * @property crossOrigin - Is this request cross-origin? Default is to determine automatically.
 * @property timeout=0 - A timeout in milliseconds for the load. If the load takes longer
 *      than this time it is cancelled and the load is considered a failure. If this value is
 *      set to `0` then there is no explicit timeout.
 * @property loadType=LoaderResource.LOAD_TYPE.XHR - How should this resource be loaded?
 * @property xhrType=LoaderResource.XHR_RESPONSE_TYPE.DEFAULT - How should the data being
 *      loaded be interpreted when using XHR?
 * @property onComplete - Callback to add an onComplete signal listener.
 * @property callback - Alias for `onComplete`.
 * @property metadata - Extra configuration for middleware and the Resource object.
 */
export interface IAddOptions {
    name?: string;
    key?: string;
    url?: string;
    crossOrigin?: string | boolean;
    timeout?: number;
    parentResource?: LoaderResource;
    loadType?: LoaderResource.LOAD_TYPE;
    xhrType?: LoaderResource.XHR_RESPONSE_TYPE;
    onComplete?: LoaderResource.OnCompleteSignal;
    callback?: LoaderResource.OnCompleteSignal;
    metadata?: IResourceMetadata;
}
/**
 * The new loader, forked from Resource Loader by Chad Engler: https://github.com/englercj/resource-loader
 *
 * ```js
 * const loader = PIXI.Loader.shared; // PixiJS exposes a premade instance for you to use.
 * // or
 * const loader = new PIXI.Loader(); // You can also create your own if you want
 *
 * const sprites = {};
 *
 * // Chainable `add` to enqueue a resource
 * loader.add('bunny', 'data/bunny.png')
 *     .add('spaceship', 'assets/spritesheet.json');
 * loader.add('scoreFont', 'assets/score.fnt');
 *
 * // Chainable `pre` to add a middleware that runs for each resource, *before* loading that resource.
 * // This is useful to implement custom caching modules (using filesystem, indexeddb, memory, etc).
 * loader.pre(cachingMiddleware);
 *
 * // Chainable `use` to add a middleware that runs for each resource, *after* loading that resource.
 * // This is useful to implement custom parsing modules (like spritesheet parsers, spine parser, etc).
 * loader.use(parsingMiddleware);
 *
 * // The `load` method loads the queue of resources, and calls the passed in callback called once all
 * // resources have loaded.
 * loader.load((loader, resources) => {
 *     // resources is an object where the key is the name of the resource loaded and the value is the resource object.
 *     // They have a couple default properties:
 *     // - `url`: The URL that the resource was loaded from
 *     // - `error`: The error that happened when trying to load (if any)
 *     // - `data`: The raw data that was loaded
 *     // also may contain other properties based on the middleware that runs.
 *     sprites.bunny = new PIXI.TilingSprite(resources.bunny.texture);
 *     sprites.spaceship = new PIXI.TilingSprite(resources.spaceship.texture);
 *     sprites.scoreFont = new PIXI.TilingSprite(resources.scoreFont.texture);
 * });
 *
 * // throughout the process multiple signals can be dispatched.
 * loader.onProgress.add(() => {}); // called once per loaded/errored file
 * loader.onError.add(() => {}); // called once per errored file
 * loader.onLoad.add(() => {}); // called once per loaded file
 * loader.onComplete.add(() => {}); // called once when the queued resources all load.
 * ```
 *
 * @memberof PIXI
 */
class Loader
{
    /**
     * The base url for all resources loaded by this loader.
     */
    baseUrl: string;
    /**
     * The progress percent of the loader going through the queue.
     */
    progress = 0;
    /**
     * Loading state of the loader, true if it is currently loading resources.
     */
    loading = false;
    /**
     * A querystring to append to every URL added to the loader.
     *
     * This should be a valid query string *without* the question-mark (`?`). The loader will
     * also *not* escape values for you. Make sure to escape your parameters with
     * [`encodeURIComponent`](https://mdn.io/encodeURIComponent) before assigning this property.
     *
     * @example
     * const loader = new Loader();
     *
     * loader.defaultQueryString = 'user=me&password=secret';
     *
     * // This will request 'image.png?user=me&password=secret'
     * loader.add('image.png').load();
     *
     * loader.reset();
     *
     * // This will request 'image.png?v=1&user=me&password=secret'
     * loader.add('image.png?v=1').load();
     */
    defaultQueryString = '';
    /**
     * The middleware to run before loading each resource.
     */
    private _beforeMiddleware: Array<ILoaderMiddleware> = [];
    /**
     * The middleware to run after loading each resource.
     */
    private _afterMiddleware: Array<ILoaderMiddleware> = [];
    /**
     * Tracks the resources we are currently completing parsing for.
     */
    private _resourcesParsing: Array<LoaderResource> = [];
    /**
     * The `_loadResource` function bound with this object context.
     *
     * @param r - The resource to load
     * @param d - The dequeue function
     */
    private _boundLoadResource = (r: LoaderResource, d: () => void): void => this._loadResource(r, d);
    /**
     * The resources waiting to be loaded.
     */
    private _queue: AsyncQueue<any>;
    /**
     * All the resources for this loader keyed by name.
     */
    resources: Dict<LoaderResource> = {};
    /**
     * Dispatched once per loaded or errored resource.
     */
    onProgress: Signal<Loader.OnProgressSignal>;
    /**
     * Dispatched once per errored resource.
     */
    onError: Signal<Loader.OnErrorSignal>;
    /**
     * Dispatched once per loaded resource.
     */
    onLoad: Signal<Loader.OnLoadSignal>;
    /**
     * Dispatched when the loader begins to process the queue.
     */
    onStart: Signal<Loader.OnStartSignal>;
    /**
     * Dispatched when the queued resources all load.
     */
    onComplete: Signal<Loader.OnCompleteSignal>;
    /**
     * @param baseUrl - The base url for all resources loaded by this loader.
     * @param concurrency - The number of resources to load concurrently.
     */
    constructor(baseUrl = '', concurrency = 10)
    {
        this.baseUrl = baseUrl;
        this._beforeMiddleware = [];
        this._afterMiddleware = [];
        this._resourcesParsing = [];
        this._boundLoadResource = (r, d) => this._loadResource(r, d);
        // The queue stays paused until load() is called.
        this._queue = AsyncQueue.queue(this._boundLoadResource, concurrency);
        this._queue.pause();
        this.resources = {};
        this.onProgress = new Signal();
        this.onError = new Signal();
        this.onLoad = new Signal();
        this.onStart = new Signal();
        this.onComplete = new Signal();
        // Install the pre/use middleware of every globally registered plugin.
        for (let i = 0; i < Loader._plugins.length; ++i)
        {
            const plugin = Loader._plugins[i];
            const { pre, use } = plugin;
            if (pre)
            {
                this.pre(pre);
            }
            if (use)
            {
                this.use(use);
            }
        }
        this._protected = false;
    }
    /**
     * Adds a resource (or multiple resources) to the loader queue.
     *
     * This function can take a wide variety of different parameters. The only thing that is always
     * required is the url to load. All the following will work:
     *
     * ```js
     * loader
     *     // normal param syntax
     *     .add('key', 'http://...', function () {})
     *     .add('http://...', function () {})
     *     .add('http://...')
     *
     *     // object syntax
     *     .add({
     *         name: 'key2',
     *         url: 'http://...'
     *     }, function () {})
     *     .add({
     *         url: 'http://...'
     *     }, function () {})
     *     .add({
     *         name: 'key3',
     *         url: 'http://...',
     *         onComplete: function () {}
     *     })
     *     .add({
     *         url: 'https://...',
     *         onComplete: function () {},
     *         crossOrigin: true
     *     })
     *
     *     // you can also pass an array of objects or urls or both
     *     .add([
     *         { name: 'key4', url: 'http://...', onComplete: function () {} },
     *         { url: 'http://...', onComplete: function () {} },
     *         'http://...'
     *     ])
     *
     *     // and you can use both params and options
     *     .add('key', 'http://...', { crossOrigin: true }, function () {})
     *     .add('http://...', { crossOrigin: true }, function () {});
     * ```
     */
    add: ILoaderAdd;
    /**
     * Same as add, params have strict order
     *
     * @private
     * @param name - The name of the resource to load.
     * @param url - The url for this resource, relative to the baseUrl of this loader.
     * @param options - The options for the load.
     * @param callback - Function to call when this specific resource completes loading.
     * @return The loader itself.
     */
    protected _add(name: string, url: string, options: IAddOptions, callback?: LoaderResource.OnCompleteSignal): this
    {
        // if loading already you can only add resources that have a parent.
        if (this.loading && (!options || !options.parentResource))
        {
            throw new Error('Cannot add resources while the loader is running.');
        }
        // check if resource already exists.
        if (this.resources[name])
        {
            throw new Error(`Resource named "${name}" already exists.`);
        }
        // add base url if this isn't an absolute url
        url = this._prepareUrl(url);
        // create the store the resource
        this.resources[name] = new LoaderResource(name, url, options);
        if (typeof callback === 'function')
        {
            this.resources[name].onAfterMiddleware.once(callback);
        }
        // if actively loading, make sure to adjust progress chunks for that parent and its children
        if (this.loading)
        {
            const parent = options.parentResource;
            const incompleteChildren = [];
            for (let i = 0; i < parent.children.length; ++i)
            {
                if (!parent.children[i].isComplete)
                {
                    incompleteChildren.push(parent.children[i]);
                }
            }
            // Redistribute the parent's remaining progress budget evenly
            // between the parent, its incomplete children and the new child.
            const fullChunk = parent.progressChunk * (incompleteChildren.length + 1); // +1 for parent
            const eachChunk = fullChunk / (incompleteChildren.length + 2); // +2 for parent & new child
            parent.children.push(this.resources[name]);
            parent.progressChunk = eachChunk;
            for (let i = 0; i < incompleteChildren.length; ++i)
            {
                incompleteChildren[i].progressChunk = eachChunk;
            }
            this.resources[name].progressChunk = eachChunk;
        }
        // add the resource to the queue
        this._queue.push(this.resources[name]);
        return this;
    }
    /* eslint-enable require-jsdoc,valid-jsdoc */
    /**
     * Sets up a middleware function that will run *before* the
     * resource is loaded.
     *
     * @param fn - The middleware function to register.
     * @return The loader itself.
     */
    pre(fn: ILoaderMiddleware): this
    {
        this._beforeMiddleware.push(fn);
        return this;
    }
    /**
     * Sets up a middleware function that will run *after* the
     * resource is loaded.
     *
     * @param fn - The middleware function to register.
     * @return The loader itself.
     */
    use(fn: ILoaderMiddleware): this
    {
        this._afterMiddleware.push(fn);
        return this;
    }
    /**
     * Resets the queue of the loader to prepare for a new load.
     *
     * @return The loader itself.
     */
    reset(): this
    {
        this.progress = 0;
        this.loading = false;
        this._queue.kill();
        this._queue.pause();
        // abort all resource loads
        for (const k in this.resources)
        {
            const res = this.resources[k];
            if (res._onLoadBinding)
            {
                res._onLoadBinding.detach();
            }
            if (res.isLoading)
            {
                res.abort('loader reset');
            }
        }
        this.resources = {};
        return this;
    }
    /**
     * Starts loading the queued resources.
     * @param cb - Optional callback that will be bound to the `complete` event.
     * @return The loader itself.
     */
    load(cb?: Loader.OnCompleteSignal): this
    {
        // register complete callback if they pass one
        if (typeof cb === 'function')
        {
            this.onComplete.once(cb);
        }
        // if the queue has already started we are done here
        if (this.loading)
        {
            return this;
        }
        if (this._queue.idle())
        {
            // nothing queued: dispatch start then complete immediately
            this._onStart();
            this._onComplete();
        }
        else
        {
            // distribute progress chunks
            const numTasks = this._queue._tasks.length;
            const chunk = MAX_PROGRESS / numTasks;
            for (let i = 0; i < this._queue._tasks.length; ++i)
            {
                this._queue._tasks[i].data.progressChunk = chunk;
            }
            // notify we are starting
            this._onStart();
            // start loading
            this._queue.resume();
        }
        return this;
    }
    /**
     * The number of resources to load concurrently.
     *
     * @default 10
     */
    get concurrency(): number
    {
        return this._queue.concurrency;
    }
    set concurrency(concurrency: number)
    {
        this._queue.concurrency = concurrency;
    }
    /**
     * Prepares a url for usage based on the configuration of this object
     * @param url - The url to prepare.
     * @return The prepared url.
     */
    private _prepareUrl(url: string): string
    {
        const parsedUrl = parseUri(url, { strictMode: true });
        let result;
        // absolute url, just use it as is.
        if (parsedUrl.protocol || !parsedUrl.path || url.indexOf('//') === 0)
        {
            result = url;
        }
        // if baseUrl doesn't end in slash and url doesn't start with slash, then add a slash inbetween
        else if (this.baseUrl.length
            && this.baseUrl.lastIndexOf('/') !== this.baseUrl.length - 1
            && url.charAt(0) !== '/'
        )
        {
            result = `${this.baseUrl}/${url}`;
        }
        else
        {
            result = this.baseUrl + url;
        }
        // if we need to add a default querystring, there is a bit more work
        if (this.defaultQueryString)
        {
            // strip any trailing "#hash", append the querystring, re-append the hash
            const hash = rgxExtractUrlHash.exec(result)[0];
            result = result.substr(0, result.length - hash.length);
            if (result.indexOf('?') !== -1)
            {
                result += `&${this.defaultQueryString}`;
            }
            else
            {
                result += `?${this.defaultQueryString}`;
            }
            result += hash;
        }
        return result;
    }
    /**
     * Loads a single resource.
     *
     * @param resource - The resource to load.
     * @param dequeue - The function to call when we need to dequeue this item.
     */
    private _loadResource(resource: LoaderResource, dequeue: () => void): void
    {
        resource._dequeue = dequeue;
        // run before middleware
        AsyncQueue.eachSeries(
            this._beforeMiddleware,
            (fn: any, next: (...args: any) => void) =>
            {
                fn.call(this, resource, () =>
                {
                    // if the before middleware marks the resource as complete,
                    // break and don't process any more before middleware
                    next(resource.isComplete ? {} : null);
                });
            },
            () =>
            {
                if (resource.isComplete)
                {
                    this._onLoad(resource);
                }
                else
                {
                    resource._onLoadBinding = resource.onComplete.once(this._onLoad, this);
                    resource.load();
                }
            },
            true
        );
    }
    /**
     * Called once loading has started.
     */
    private _onStart(): void
    {
        this.progress = 0;
        this.loading = true;
        this.onStart.dispatch(this);
    }
    /**
     * Called once each resource has loaded.
     */
    private _onComplete(): void
    {
        this.progress = MAX_PROGRESS;
        this.loading = false;
        this.onComplete.dispatch(this, this.resources);
    }
    /**
     * Called each time a resources is loaded.
     * @param resource - The resource that was loaded
     */
    private _onLoad(resource: LoaderResource): void
    {
        resource._onLoadBinding = null;
        // remove this resource from the async queue, and add it to our list of resources that are being parsed
        this._resourcesParsing.push(resource);
        resource._dequeue();
        // run all the after middleware for this resource
        AsyncQueue.eachSeries(
            this._afterMiddleware,
            (fn: any, next: any) =>
            {
                fn.call(this, resource, next);
            },
            () =>
            {
                resource.onAfterMiddleware.dispatch(resource);
                this.progress = Math.min(MAX_PROGRESS, this.progress + resource.progressChunk);
                this.onProgress.dispatch(this, resource);
                if (resource.error)
                {
                    this.onError.dispatch(resource.error, this, resource);
                }
                else
                {
                    this.onLoad.dispatch(this, resource);
                }
                this._resourcesParsing.splice(this._resourcesParsing.indexOf(resource), 1);
                // do completion check
                if (this._queue.idle() && this._resourcesParsing.length === 0)
                {
                    this._onComplete();
                }
            },
            true
        );
    }
    // Plugins registered via registerPlugin(); applied to every new Loader.
    private static _plugins: Array<ILoaderPlugin> = [];
    private static _shared: Loader;
    /**
     * If this loader cannot be destroyed.
     * @default false
     */
    private _protected: boolean;
    /**
     * Destroy the loader, removes references.
     */
    public destroy(): void
    {
        if (!this._protected)
        {
            this.reset();
        }
    }
    /**
     * A premade instance of the loader that can be used to load resources.
     */
    public static get shared(): Loader
    {
        let shared = Loader._shared;
        if (!shared)
        {
            shared = new Loader();
            shared._protected = true;
            Loader._shared = shared;
        }
        return shared;
    }
    /**
     * Adds a Loader plugin for the global shared loader and all
     * new Loader instances created.
     *
     * @param plugin - The plugin to add
     * @return Reference to PIXI.Loader for chaining
     */
    public static registerPlugin(plugin: ILoaderPlugin): typeof Loader
    {
        Loader._plugins.push(plugin);
        if (plugin.add)
        {
            plugin.add();
        }
        return Loader;
    }
}
// Implementation of the flexible `add` overloads declared by ILoaderAdd.
// Normalizes every accepted call shape down to the strict-order _add().
// NOTE: the argument-shifting below is order-dependent; each step relies
// on the state left by the previous one.
Loader.prototype.add = function add(this: Loader, name: any, url?: any, options?: any, callback?: any): Loader
{
    // special case of an array of objects or urls
    if (Array.isArray(name))
    {
        for (let i = 0; i < name.length; ++i)
        {
            this.add((name as any)[i]);
        }
        return this;
    }
    // if an object is passed instead of params: unpack it; a second positional
    // argument, if present, is then treated as the callback
    if (typeof name === 'object')
    {
        options = name;
        callback = (url as any) || options.callback || options.onComplete;
        url = options.url;
        name = options.name || options.key || options.url;
    }
    // case where no name is passed shift all args over by one.
    if (typeof url !== 'string')
    {
        callback = options as any;
        options = url;
        url = name;
    }
    // now that we shifted make sure we have a proper url.
    if (typeof url !== 'string')
    {
        throw new Error('No url passed to add resource to loader.');
    }
    // options are optional so people might pass a function and no options
    if (typeof options === 'function')
    {
        callback = options;
        options = null;
    }
    return this._add(name, url, options, callback);
};
// eslint-disable-next-line @typescript-eslint/no-namespace
namespace Loader
{
    /**
     * When the loader begins to process the queue it dispatches this callback.
     * @param loader - The loader that has started loading resources.
     */
    export type OnStartSignal = (loader: Loader) => void;
    /**
     * When the progress changes the loader and resource are dispatched.
     * @param loader - The loader the progress is advancing on.
     * @param resource - The resource that has completed or failed to cause the progress to advance.
     */
    export type OnProgressSignal = (loader: Loader, resource: LoaderResource) => void;
    /**
     * When a load completes without error the loader and resource are dispatched.
     * @param loader - The loader that loaded the resource.
     * @param resource - The resource that has completed.
     */
    export type OnLoadSignal = (loader: Loader, resource: LoaderResource) => void;
    /**
     * When all queued resources finish loading the loader and the resource map are dispatched.
     * @param loader - The loader that has finished loading resources.
     * @param resources - All loaded resources, keyed by name.
     */
    export type OnCompleteSignal = (loader: Loader, resources: Dict<LoaderResource>) => void;
    /**
     * When an error occurs the error, loader and resource are dispatched.
     * @param error - The error that happened.
     * @param loader - The loader the error happened in.
     * @param resource - The resource that caused the error.
     */
    export type OnErrorSignal = (error: Error, loader: Loader, resource: LoaderResource) => void;
}
export { Loader };
/**
 * Plugin to be installed for handling specific Loader resources.
 *
 * @property add - Function to call immediately after registering plugin.
 * @property pre - Middleware function to run before load, the
 *      arguments for this are `(resource, next)`
 * @property use - Middleware function to run after load, the
 *      arguments for this are `(resource, next)`
 */
export interface ILoaderPlugin {
    /**
     * Function to call immediately after registering plugin.
     */
    add?(): void;
    /**
     * Middleware function to run before load
     * @param resource - resource
     * @param next - next middleware
     */
    pre?(resource: LoaderResource, next: (...args: any[]) => void): void;
    /**
     * Middleware function to run after load
     * @param resource - resource
     * @param next - next middleware
     */
    use?(resource: LoaderResource, next: (...args: any[]) => void): void;
}
import * as msRest from "@azure/ms-rest-js";
import * as Models from "../models";
import * as Mappers from "../models/commitmentPlansMappers";
import * as Parameters from "../models/parameters";
import { AzureMLCommitmentPlansManagementClientContext } from "../azureMLCommitmentPlansManagementClientContext";
/**
 * Class representing a CommitmentPlans.
 * Operation group for Microsoft.MachineLearning commitmentPlans resources:
 * each public method forwards its arguments plus an operation spec to
 * `client.sendOperationRequest`, supporting both Promise and callback styles.
 */
export class CommitmentPlans {
    private readonly client: AzureMLCommitmentPlansManagementClientContext;
    /**
     * Create a CommitmentPlans.
     * @param {AzureMLCommitmentPlansManagementClientContext} client Reference to the service client.
     */
    constructor(client: AzureMLCommitmentPlansManagementClientContext) {
        this.client = client;
    }
    /**
     * Retrieve an Azure ML commitment plan by its subscription, resource group and name.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansGetResponse>
     */
    get(resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase): Promise<Models.CommitmentPlansGetResponse>;
    /**
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param callback The callback
     */
    get(resourceGroupName: string, commitmentPlanName: string, callback: msRest.ServiceCallback<Models.CommitmentPlan>): void;
    /**
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param options The optional parameters
     * @param callback The callback
     */
    get(resourceGroupName: string, commitmentPlanName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CommitmentPlan>): void;
    get(resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CommitmentPlan>, callback?: msRest.ServiceCallback<Models.CommitmentPlan>): Promise<Models.CommitmentPlansGetResponse> {
        return this.client.sendOperationRequest(
            {
                resourceGroupName,
                commitmentPlanName,
                options
            },
            getOperationSpec,
            callback) as Promise<Models.CommitmentPlansGetResponse>;
    }
    /**
     * Create a new Azure ML commitment plan resource or updates an existing one.
     * @param createOrUpdatePayload The payload to create or update the Azure ML commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansCreateOrUpdateResponse>
     */
    createOrUpdate(createOrUpdatePayload: Models.CommitmentPlan, resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase): Promise<Models.CommitmentPlansCreateOrUpdateResponse>;
    /**
     * @param createOrUpdatePayload The payload to create or update the Azure ML commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param callback The callback
     */
    createOrUpdate(createOrUpdatePayload: Models.CommitmentPlan, resourceGroupName: string, commitmentPlanName: string, callback: msRest.ServiceCallback<Models.CommitmentPlan>): void;
    /**
     * @param createOrUpdatePayload The payload to create or update the Azure ML commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param options The optional parameters
     * @param callback The callback
     */
    createOrUpdate(createOrUpdatePayload: Models.CommitmentPlan, resourceGroupName: string, commitmentPlanName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CommitmentPlan>): void;
    createOrUpdate(createOrUpdatePayload: Models.CommitmentPlan, resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CommitmentPlan>, callback?: msRest.ServiceCallback<Models.CommitmentPlan>): Promise<Models.CommitmentPlansCreateOrUpdateResponse> {
        return this.client.sendOperationRequest(
            {
                createOrUpdatePayload,
                resourceGroupName,
                commitmentPlanName,
                options
            },
            createOrUpdateOperationSpec,
            callback) as Promise<Models.CommitmentPlansCreateOrUpdateResponse>;
    }
    /**
     * Remove an existing Azure ML commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param [options] The optional parameters
     * @returns Promise<msRest.RestResponse>
     */
    remove(resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase): Promise<msRest.RestResponse>;
    /**
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param callback The callback
     */
    remove(resourceGroupName: string, commitmentPlanName: string, callback: msRest.ServiceCallback<void>): void;
    /**
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param options The optional parameters
     * @param callback The callback
     */
    remove(resourceGroupName: string, commitmentPlanName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<void>): void;
    remove(resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<void>, callback?: msRest.ServiceCallback<void>): Promise<msRest.RestResponse> {
        return this.client.sendOperationRequest(
            {
                resourceGroupName,
                commitmentPlanName,
                options
            },
            removeOperationSpec,
            callback);
    }
    /**
     * Patch an existing Azure ML commitment plan resource.
     * @param patchPayload The payload to use to patch the Azure ML commitment plan. Only tags and SKU
     * may be modified on an existing commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansPatchResponse>
     */
    patch(patchPayload: Models.CommitmentPlanPatchPayload, resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase): Promise<Models.CommitmentPlansPatchResponse>;
    /**
     * @param patchPayload The payload to use to patch the Azure ML commitment plan. Only tags and SKU
     * may be modified on an existing commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param callback The callback
     */
    patch(patchPayload: Models.CommitmentPlanPatchPayload, resourceGroupName: string, commitmentPlanName: string, callback: msRest.ServiceCallback<Models.CommitmentPlan>): void;
    /**
     * @param patchPayload The payload to use to patch the Azure ML commitment plan. Only tags and SKU
     * may be modified on an existing commitment plan.
     * @param resourceGroupName The resource group name.
     * @param commitmentPlanName The Azure ML commitment plan name.
     * @param options The optional parameters
     * @param callback The callback
     */
    patch(patchPayload: Models.CommitmentPlanPatchPayload, resourceGroupName: string, commitmentPlanName: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CommitmentPlan>): void;
    patch(patchPayload: Models.CommitmentPlanPatchPayload, resourceGroupName: string, commitmentPlanName: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CommitmentPlan>, callback?: msRest.ServiceCallback<Models.CommitmentPlan>): Promise<Models.CommitmentPlansPatchResponse> {
        return this.client.sendOperationRequest(
            {
                patchPayload,
                resourceGroupName,
                commitmentPlanName,
                options
            },
            patchOperationSpec,
            callback) as Promise<Models.CommitmentPlansPatchResponse>;
    }
    /**
     * Retrieve all Azure ML commitment plans in a subscription.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansListResponse>
     */
    list(options?: Models.CommitmentPlansListOptionalParams): Promise<Models.CommitmentPlansListResponse>;
    /**
     * @param callback The callback
     */
    list(callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    /**
     * @param options The optional parameters
     * @param callback The callback
     */
    list(options: Models.CommitmentPlansListOptionalParams, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    list(options?: Models.CommitmentPlansListOptionalParams | msRest.ServiceCallback<Models.CommitmentPlanListResult>, callback?: msRest.ServiceCallback<Models.CommitmentPlanListResult>): Promise<Models.CommitmentPlansListResponse> {
        return this.client.sendOperationRequest(
            {
                options
            },
            listOperationSpec,
            callback) as Promise<Models.CommitmentPlansListResponse>;
    }
    /**
     * Retrieve all Azure ML commitment plans in a resource group.
     * @param resourceGroupName The resource group name.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansListInResourceGroupResponse>
     */
    listInResourceGroup(resourceGroupName: string, options?: Models.CommitmentPlansListInResourceGroupOptionalParams): Promise<Models.CommitmentPlansListInResourceGroupResponse>;
    /**
     * @param resourceGroupName The resource group name.
     * @param callback The callback
     */
    listInResourceGroup(resourceGroupName: string, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    /**
     * @param resourceGroupName The resource group name.
     * @param options The optional parameters
     * @param callback The callback
     */
    listInResourceGroup(resourceGroupName: string, options: Models.CommitmentPlansListInResourceGroupOptionalParams, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    listInResourceGroup(resourceGroupName: string, options?: Models.CommitmentPlansListInResourceGroupOptionalParams | msRest.ServiceCallback<Models.CommitmentPlanListResult>, callback?: msRest.ServiceCallback<Models.CommitmentPlanListResult>): Promise<Models.CommitmentPlansListInResourceGroupResponse> {
        return this.client.sendOperationRequest(
            {
                resourceGroupName,
                options
            },
            listInResourceGroupOperationSpec,
            callback) as Promise<Models.CommitmentPlansListInResourceGroupResponse>;
    }
    /**
     * Retrieve all Azure ML commitment plans in a subscription.
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansListNextResponse>
     */
    listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.CommitmentPlansListNextResponse>;
    /**
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param callback The callback
     */
    listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    /**
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param options The optional parameters
     * @param callback The callback
     */
    listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CommitmentPlanListResult>, callback?: msRest.ServiceCallback<Models.CommitmentPlanListResult>): Promise<Models.CommitmentPlansListNextResponse> {
        return this.client.sendOperationRequest(
            {
                nextPageLink,
                options
            },
            listNextOperationSpec,
            callback) as Promise<Models.CommitmentPlansListNextResponse>;
    }
    /**
     * Retrieve all Azure ML commitment plans in a resource group.
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param [options] The optional parameters
     * @returns Promise<Models.CommitmentPlansListInResourceGroupNextResponse>
     */
    listInResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.CommitmentPlansListInResourceGroupNextResponse>;
    /**
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param callback The callback
     */
    listInResourceGroupNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    /**
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param options The optional parameters
     * @param callback The callback
     */
    listInResourceGroupNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.CommitmentPlanListResult>): void;
    listInResourceGroupNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.CommitmentPlanListResult>, callback?: msRest.ServiceCallback<Models.CommitmentPlanListResult>): Promise<Models.CommitmentPlansListInResourceGroupNextResponse> {
        return this.client.sendOperationRequest(
            {
                nextPageLink,
                options
            },
            listInResourceGroupNextOperationSpec,
            callback) as Promise<Models.CommitmentPlansListInResourceGroupNextResponse>;
    }
}
// Operation Specifications
// Serializer shared by every operation spec below; Mappers supplies the
// model (de)serialization metadata referenced by the response bodyMappers.
const serializer = new msRest.Serializer(Mappers);
// GET one commitment plan by subscription / resource group / plan name.
const getOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearning/commitmentPlans/{commitmentPlanName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.commitmentPlanName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlan
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// PUT (create or replace) a commitment plan. The request body is required
// and serialized with the CommitmentPlan mapper; 200 = updated, 201 = created.
const createOrUpdateOperationSpec: msRest.OperationSpec = {
  httpMethod: "PUT",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearning/commitmentPlans/{commitmentPlanName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.commitmentPlanName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "createOrUpdatePayload",
    mapper: {
      ...Mappers.CommitmentPlan,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlan
    },
    201: {
      bodyMapper: Mappers.CommitmentPlan
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// DELETE a commitment plan; a successful removal returns an empty 200.
const removeOperationSpec: msRest.OperationSpec = {
  httpMethod: "DELETE",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearning/commitmentPlans/{commitmentPlanName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.commitmentPlanName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {},
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// PATCH (partial update) a commitment plan with a required
// CommitmentPlanPatchPayload body.
const patchOperationSpec: msRest.OperationSpec = {
  httpMethod: "PATCH",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearning/commitmentPlans/{commitmentPlanName}",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName,
    Parameters.commitmentPlanName
  ],
  queryParameters: [
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  requestBody: {
    parameterPath: "patchPayload",
    mapper: {
      ...Mappers.CommitmentPlanPatchPayload,
      required: true
    }
  },
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlan
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// GET all commitment plans in the subscription; skipToken supports paging.
const listOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/providers/Microsoft.MachineLearning/commitmentPlans",
  urlParameters: [
    Parameters.subscriptionId
  ],
  queryParameters: [
    Parameters.skipToken,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlanListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// GET all commitment plans within a single resource group.
const listInResourceGroupOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  path: "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearning/commitmentPlans",
  urlParameters: [
    Parameters.subscriptionId,
    Parameters.resourceGroupName
  ],
  queryParameters: [
    Parameters.skipToken,
    Parameters.apiVersion
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlanListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// GET the next page of list() results by following the nextLink URL
// returned by the service.
const listNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlanListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
// GET the next page of listInResourceGroup() results via its nextLink URL.
const listInResourceGroupNextOperationSpec: msRest.OperationSpec = {
  httpMethod: "GET",
  baseUrl: "https://management.azure.com",
  path: "{nextLink}",
  urlParameters: [
    Parameters.nextPageLink
  ],
  headerParameters: [
    Parameters.acceptLanguage
  ],
  responses: {
    200: {
      bodyMapper: Mappers.CommitmentPlanListResult
    },
    default: {
      bodyMapper: Mappers.CloudError
    }
  },
  serializer
};
import {AnyAction, Store} from 'redux';
import {toast} from 'react-toastify';
import {CommsNode} from './commsNode';
import {ScenarioReducerActionType, setScenarioLocalAction, updateHeadActionIdsAction} from '../redux/scenarioReducer';
import {promiseSleep} from './promiseSleep';
import {
addPendingActionAction,
discardPendingActionAction,
setLastCommonScenarioAction,
TabletopValidationType
} from '../redux/tabletopValidationReducer';
import {
addConnectedUserAction,
challengeResponseAction,
challengeUserAction,
ConnectedUserActionTypes, ConnectedUserReducerAction, verifyConnectionAction,
verifyGMAction
} from '../redux/connectedUserReducer';
import {
getConnectedUsersFromStore,
getDeviceLayoutFromStore,
getLoggedInUserFromStore,
getScenarioFromStore,
getTabletopFromStore,
getTabletopValidationFromStore,
ReduxStoreType
} from '../redux/mainReducer';
import {isScenarioAction, NetworkedAction, NetworkedMeta} from './types';
import {getNetworkHubId, isUserAllowedOnTabletop, scenarioToJson} from './scenarioUtils';
import {enc, HmacSHA256} from 'crypto-js';
import {setTabletopIdAction} from '../redux/locationReducer';
// Identifiers for the peer-to-peer protocol messages exchanged between
// clients. These are distinct from redux actions, which carry a `type`
// field rather than a `message` field (see peerMessageHandler).
export enum MessageTypeEnum {
  CHECK_ACTIONS_MESSAGE = 'check-actions',
  MISSING_ACTION_MESSAGE = 'missing-action',
  RESEND_ACTIONS_MESSAGE = 'resend-actions',
  CLOSE_MESSAGE = 'close'
}
// Asks the receiver to verify that it knows all of these head action ids.
interface CheckActionsMessageType {
  message: MessageTypeEnum.CHECK_ACTIONS_MESSAGE;
  headActionIds: string[];
}
// Factory for CHECK_ACTIONS_MESSAGE.
export function checkActionsMessage(headActionIds: string[]): CheckActionsMessageType {
  return {message: MessageTypeEnum.CHECK_ACTIONS_MESSAGE, headActionIds};
}
// Reports that the sender is missing some actions. `knownActionIds` lets
// the receiver avoid resending actions the sender already has; `actionId`
// optionally names the pending action that triggered the request.
interface MissingActionMessageType {
  message: MessageTypeEnum.MISSING_ACTION_MESSAGE;
  actionId?: string;
  missingActionIds: string[];
  knownActionIds: string[];
}
// Factory for MISSING_ACTION_MESSAGE.
export function missingActionMessage(missingActionIds: string[], knownActionIds: string[], actionId?: string): MissingActionMessageType {
  return {message: MessageTypeEnum.MISSING_ACTION_MESSAGE, missingActionIds, knownActionIds, actionId};
}
// Map of actionId -> action to resend; null signals that the gaps could not
// be reconstructed and the receiver should fall back to asserting state.
type AncestorActions = {[actionId: string]: AnyAction} | null;
interface ResendActionsMessage {
  message: MessageTypeEnum.RESEND_ACTIONS_MESSAGE;
  pendingActionId?: string;
  missingActionIds: string[];
  actions: AncestorActions;
}
// Factory for RESEND_ACTIONS_MESSAGE.
export function resendActionsMessage(missingActionIds: string[], actions: AncestorActions, pendingActionId?: string): ResendActionsMessage {
  return {message: MessageTypeEnum.RESEND_ACTIONS_MESSAGE, missingActionIds, actions, pendingActionId};
}
// Announces a deliberate disconnect, with a human-readable reason to show.
interface CloseMessage {
  message: MessageTypeEnum.CLOSE_MESSAGE;
  reason: string;
}
// Factory for CLOSE_MESSAGE.
export function closeMessage(reason: string): CloseMessage {
  return {message: MessageTypeEnum.CLOSE_MESSAGE, reason};
}
// Union of every protocol message handled by receiveMessageFromPeer.
type MessageType = CheckActionsMessageType | MissingActionMessageType | ResendActionsMessage | CloseMessage;
/**
 * Walk backwards through our action history, accumulating every action in
 * `actionIds` that the peer does not already know (per `knownActionIds`),
 * plus - for scenario actions - their head-action ancestors, recursively.
 * Results are accumulated into `result`, which is mutated in place.
 * Returns null as soon as any required action is absent from our own
 * actionHistory, signalling that we cannot reconstruct the peer's gap.
 */
function findAncestorActions(validation: TabletopValidationType, knownActionIds: {[actionId: string]: boolean}, actionIds: string[], result: AncestorActions = {}): AncestorActions {
  return actionIds.reduce((all, actionId) => {
    // Once `all` becomes null (an ancestor was unresolvable), skip all work.
    if (all && !knownActionIds[actionId] && !all[actionId]) {
      const action = validation.actionHistory[actionId];
      if (!action) {
        return null;
      }
      all[actionId] = action;
      if (isScenarioAction(action)) {
        // Recurse into this action's own ancestors. NB `result` is the same
        // object as `all` here, so the accumulation is shared.
        return findAncestorActions(validation, knownActionIds, action.headActionIds, result);
      }
    }
    return all;
  }, result);
}
/**
 * Flatten `root` and its (transitive) head-action ancestors into a list in
 * post-order, i.e. ancestors first and `root` last, so they can be
 * dispatched in dependency order. Actions emitted from `actions` are
 * deleted from it so each is only emitted once.
 */
function postOrderActions(root: AnyAction, actions: {[actionId: string]: AnyAction}): AnyAction[] {
  if (!root.headActionIds) {
    return [root];
  }
  const ordered: AnyAction[] = [];
  for (const ancestorId of root.headActionIds as string[]) {
    const ancestor = actions[ancestorId];
    if (ancestor) {
      delete actions[ancestorId];
      ordered.push(...postOrderActions(ancestor, actions));
    }
  }
  ordered.push(root);
  return ordered;
}
/**
 * Directly assert our current scenario state to a single peer, replacing
 * whatever state they had. Verified GMs receive the full scenario; everyone
 * else receives the filtered player view produced by scenarioToJson.
 */
async function sendScenarioState(state: ReduxStoreType, commsNode: CommsNode, peerId: string) {
  const [fullScenario, playerScenario] = scenarioToJson(getScenarioFromStore(state));
  const peerUser = getConnectedUsersFromStore(state).users[peerId];
  const scenario = peerUser.verifiedGM ? fullScenario : playerScenario;
  await commsNode.sendTo({...setScenarioLocalAction(scenario), gmOnly: peerUser.verifiedGM}, {only: [peerId]});
}
/**
 * Handle a peer-to-peer protocol message (as opposed to a redux action).
 * Implements the gap-repair handshake between peers:
 * CHECK_ACTIONS -> MISSING_ACTION -> RESEND_ACTIONS, with a full scenario
 * assertion as the fallback when gaps cannot be reconstructed, plus CLOSE
 * for deliberate disconnects.
 */
async function receiveMessageFromPeer(store: Store<ReduxStoreType>, commsNode: CommsNode, peerId: string, message: MessageType) {
  const state = store.getState();
  let validation = getTabletopValidationFromStore(state);
  const loggedInUser = getLoggedInUserFromStore(state)!;
  switch (message.message) {
    case MessageTypeEnum.CHECK_ACTIONS_MESSAGE:
      // The peer asked whether we know these head action ids; if any are
      // unknown, reply with what we're missing and what we already know.
      const missingActionIds = findMissingActionIds(validation, message.headActionIds);
      if (missingActionIds.length > 0) {
        const knownActionIds = Object.keys(validation.actionHistory).concat(Object.keys(validation.initialActionIds));
        await commsNode.sendTo(missingActionMessage(missingActionIds, knownActionIds), {only: [peerId]});
      }
      break;
    case MessageTypeEnum.MISSING_ACTION_MESSAGE:
      // Create a map of known actionIds to make checking faster
      const knownActionIds = message.knownActionIds.reduce((all, actionId) => {
        all[actionId] = true;
        return all;
      }, {});
      // Accumulate the missing actions required to fill in the peer's gaps.
      const resendActions = findAncestorActions(validation, knownActionIds, message.missingActionIds);
      if (resendActions === null && loggedInUser.emailAddress === getTabletopFromStore(state).gm) {
        // If we can't resolve their gaps, and we're a GM, simply assert our state.
        await sendScenarioState(state, commsNode, peerId);
      } else {
        // Otherwise, send them back the actions they missed, or null if we can't fill the gaps.
        await commsNode.sendTo(resendActionsMessage(message.missingActionIds, resendActions, message.actionId), {only: [peerId]});
      }
      break;
    case MessageTypeEnum.RESEND_ACTIONS_MESSAGE:
      if (message.actions === null) {
        // They can't explain the missing actions - assert our state.
        await sendScenarioState(state, commsNode, peerId);
        if (message.pendingActionId) {
          // Discard the pending action.
          store.dispatch(discardPendingActionAction(message.pendingActionId));
        }
        return;
      }
      // Add the pending action, if any, to the set of actions returned to us, and dispatch them all in order.
      const pendingAction = message.pendingActionId ? validation.pendingActions[message.pendingActionId] : undefined;
      const dispatchActions = pendingAction ? {...message.actions, [pendingAction.actionId]: pendingAction} : message.actions;
      const actionIds = message.pendingActionId ? [message.pendingActionId] : message.missingActionIds;
      for (let pendingActionId of actionIds) {
        if (!dispatchActions[pendingActionId]) {
          // Undo/redo can cause actions to be lost from tabletopValidation.
          continue;
        }
        const orderedActions = postOrderActions(dispatchActions[pendingActionId], dispatchActions);
        for (let action of orderedActions) {
          // Verify that we know the headActionIds of action.
          if (isScenarioAction(action)) {
            // NB we need to get tabletopValidation from the latest store state.
            validation = getTabletopValidationFromStore(store.getState());
            const missingActionIds = findMissingActionIds(validation, action.headActionIds);
            if (missingActionIds.length > 0) {
              console.error('Still have unknown action IDs - this should not happen');
              // Ok, give up.
              if (loggedInUser.emailAddress === getTabletopFromStore(state).gm) {
                // If we're a GM, assert our scenario state.
                await sendScenarioState(store.getState(), commsNode, peerId);
                if (message.pendingActionId) {
                  // Discard the pending action.
                  store.dispatch(discardPendingActionAction(message.pendingActionId));
                }
              } else {
                // Otherwise ask them to assert their scenario state.
                await commsNode.sendTo(resendActionsMessage([], null), {only: [peerId]});
              }
              return;
            }
          }
          // Dispatch the action, but remove peerKey and mark private so it isn't automatically sent on.
          await dispatchGoodAction(store, commsNode, peerId, {...action, peerKey: undefined, private: true});
        }
      }
      break;
    case MessageTypeEnum.CLOSE_MESSAGE:
      // The peer has closed the connection deliberately!
      store.dispatch(setTabletopIdAction());
      if (message.reason) {
        toast(message.reason);
      }
      break;
  }
}
/**
 * Returns the subset of headActionIds that appear in neither our action
 * history nor the set of initial action ids - i.e. the actions we have
 * never seen and must request from a peer.
 */
function findMissingActionIds(validation: TabletopValidationType, headActionIds: string[]) {
  const missing: string[] = [];
  for (const actionId of headActionIds) {
    if (!validation.actionHistory[actionId] && !validation.initialActionIds[actionId]) {
      missing.push(actionId);
    }
  }
  return missing;
}
/**
 * React to connected-user actions: challenge a peer claiming to be the GM
 * (HMAC-SHA256 over a random challenge using the shared gmSecret), respond
 * to such challenges ourselves, verify challenge responses, and kick off a
 * missing-action check for newly connected/verified peers.
 */
export async function handleConnectionActions(action: ConnectedUserReducerAction, fromPeerId: string, store: Store<ReduxStoreType>, commsNode: CommsNode) {
  const tabletop = getTabletopFromStore(store.getState());
  switch (action.type) {
    case ConnectedUserActionTypes.ADD_CONNECTED_USER:
      // If I know the gm secret, challenge any user who claims to be the GM.
      if (tabletop.gmSecret && action.user.emailAddress === tabletop.gm
          && !getConnectedUsersFromStore(store.getState()).users[action.peerId].verifiedGM) {
        const challengeAction = challengeUserAction(fromPeerId);
        store.dispatch(challengeAction);
        await commsNode.sendTo(challengeAction, {only: [fromPeerId]});
      } else {
        // TODO need some actual validation mechanism to prove identity before we blithely believe their email address.
        const tabletop = getTabletopFromStore(store.getState());
        const allowed = isUserAllowedOnTabletop(tabletop.gm, action.user.emailAddress, tabletop.tabletopUserControl);
        if (allowed !== null) {
          store.dispatch(verifyConnectionAction(fromPeerId, allowed));
        }
        // Send a message to trigger the new client to perform a missing action check.
        const playerTabletopValidation = getTabletopValidationFromStore(store.getState());
        if (playerTabletopValidation.lastCommonScenario) {
          await commsNode.sendTo(checkActionsMessage(playerTabletopValidation.lastCommonScenario.playerHeadActionIds), {only: [fromPeerId]});
        }
      }
      break;
    case ConnectedUserActionTypes.CHALLENGE_USER:
      if (!action.challenge) {
        throw new Error('no challenge in action ' + JSON.stringify(action));
      }
      if (!tabletop.gmSecret) {
        throw new Error('no gmSecret in tabletop ' + JSON.stringify(tabletop));
      }
      // Respond to a challenge to prove we know the gmSecret.
      const challengeHash = HmacSHA256(action.challenge, tabletop.gmSecret);
      const responseAction = challengeResponseAction(fromPeerId, enc.Base64.stringify(challengeHash));
      await commsNode.sendTo(responseAction, {only: [fromPeerId]});
      break;
    case ConnectedUserActionTypes.CHALLENGE_RESPONSE:
      // Verify the response to a challenge.
      const connectedUsers = getConnectedUsersFromStore(store.getState()).users;
      const responseHash = HmacSHA256(connectedUsers[fromPeerId].challenge, tabletop.gmSecret);
      if (action.response === enc.Base64.stringify(responseHash)) {
        store.dispatch(verifyConnectionAction(fromPeerId, true));
        store.dispatch(verifyGMAction(fromPeerId, true));
        // Send a message to trigger the new client to perform a missing action check.
        const tabletopValidation = getTabletopValidationFromStore(store.getState());
        if (tabletopValidation.lastCommonScenario) {
          await commsNode.sendTo(checkActionsMessage(tabletopValidation.lastCommonScenario.headActionIds), {only: [fromPeerId]});
        }
      }
      break;
  }
}
/**
 * Handle a redux action received from a peer. Scenario actions are only
 * dispatched once all of their headActionIds are known locally; otherwise
 * the action is parked as pending and the peer is asked for the missing
 * ancestors (after a short grace period for in-flight actions to arrive).
 */
async function receiveActionFromPeer(store: Store<ReduxStoreType>, commsNode: CommsNode, peerId: string, action: NetworkedAction) {
  if (isScenarioAction(action)) {
    // Check that we know the action's headActionIds.
    let validation = getTabletopValidationFromStore(store.getState());
    let missingActionIds = findMissingActionIds(validation, action.headActionIds);
    if (missingActionIds.length > 0) {
      // Wait a short time in case those actions arrive by themselves.
      await promiseSleep(500);
      validation = getTabletopValidationFromStore(store.getState());
      missingActionIds = findMissingActionIds(validation, action.headActionIds);
      if (missingActionIds.length > 0) {
        // Some actions are still unknown. We need to ask the peer for them.
        store.dispatch(addPendingActionAction(action));
        const knownActionIds = Object.keys(validation.actionHistory).concat(Object.keys(validation.initialActionIds));
        await commsNode.sendTo(missingActionMessage(missingActionIds, knownActionIds, action.actionId), {only: [peerId]});
        return;
      }
    }
  }
  // All good - dispatch action
  await dispatchGoodAction(store, commsNode, peerId, action);
}
/**
 * Dispatch a fully-resolved action locally (unless vetoed by the comms
 * node's shouldDispatchLocally hook), update head-action bookkeeping for
 * scenario actions, run connection handling, and - when this node is the
 * network hub - forward the action to the other eligible clients.
 */
async function dispatchGoodAction(store: Store<ReduxStoreType>, commsNode: CommsNode, peerId: string, action: AnyAction) {
  if (!commsNode.options.shouldDispatchLocally || commsNode.options.shouldDispatchLocally(action, store.getState(), commsNode)) {
    store.dispatch(action);
  }
  if (isScenarioAction(action)) {
    store.dispatch(updateHeadActionIdsAction(action));
    store.dispatch(setLastCommonScenarioAction(getScenarioFromStore(store.getState()), action as ScenarioReducerActionType));
  }
  // Handle actions when a new user connects
  await handleConnectionActions(action, peerId, store, commsNode);
  const state = store.getState();
  if (!action.private && commsNode.peerId === getNetworkHubId(commsNode.userId, commsNode.peerId, getTabletopFromStore(state).gm, getConnectedUsersFromStore(state).users)) {
    // Network hub needs to forward good actions to other clients.
    // Skip the original sender, peers not cleared for gmOnly actions, and
    // (for ADD_CONNECTED_USER) the peer being announced.
    const connectedUsers = getConnectedUsersFromStore(state).users;
    const only = Object.keys(connectedUsers)
      .filter((peerId) => (peerId !== action.fromPeerId
        && (!action.gmOnly || connectedUsers[peerId].verifiedGM)
        && (action.type !== ConnectedUserActionTypes.ADD_CONNECTED_USER || peerId !== action.peerId)
      ));
    await commsNode.sendTo(action, {only});
    // Hub also needs to send existing connected user details to whoever just connected
    if (action.type === ConnectedUserActionTypes.ADD_CONNECTED_USER) {
      const deviceLayouts = getDeviceLayoutFromStore(state);
      for (let otherPeerId of Object.keys(connectedUsers)) {
        if (otherPeerId !== peerId) {
          const user = connectedUsers[otherPeerId];
          await commsNode.sendTo(addConnectedUserAction(otherPeerId, user.user, user.version!,
            user.deviceWidth, user.deviceHeight, deviceLayouts[otherPeerId]),
            {only: [peerId]});
        }
      }
    }
  }
}
/**
 * Build the network metadata attached to every incoming message/action:
 * the direct sender (fromPeerId), the original author (originPeerId,
 * defaulting to the sender), and whether the sender is the verified GM.
 */
function buildNetworkMetadata(state: ReduxStoreType, fromPeerId: string, originPeerId?: string): NetworkedMeta {
  const tabletop = getTabletopFromStore(state);
  // Fix: the per-peer map lives under `.users` on the connected-user store
  // slice (cf. every other call site in this file, e.g. sendScenarioState,
  // handleConnectionActions); indexing the slice itself meant the lookup
  // always missed, so fromGM could never be true.
  const connectedUsers = getConnectedUsersFromStore(state).users;
  const fromGM = (connectedUsers[fromPeerId] && connectedUsers[fromPeerId].verifiedConnection && connectedUsers[fromPeerId].user.emailAddress === tabletop.gm);
  return {fromPeerId, originPeerId: originPeerId || fromPeerId, fromGM};
}
export default async function peerMessageHandler(store: Store<ReduxStoreType>, peerNode: CommsNode, peerId: string, data: string): Promise<void> {
const rawMessage = JSON.parse(data);
// Add network metadata to the action
const meta = buildNetworkMetadata(store.getState(), peerId, rawMessage.meta?.originPeerId || rawMessage.originPeerId);
const message = {...rawMessage, ...meta, meta: {...rawMessage.meta, ...meta}};
if (message.type) {
await receiveActionFromPeer(store, peerNode, peerId, message as NetworkedAction);
} else {
await receiveMessageFromPeer(store, peerNode, peerId, message as MessageType);
}
} | the_stack |
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import 'chrome://resources/cr_elements/cr_icon_button/cr_icon_button.m.js';
import 'chrome://resources/cr_elements/cr_link_row/cr_link_row.js';
import 'chrome://resources/cr_elements/shared_style_css.m.js';
import 'chrome://resources/polymer/v3_0/iron-flex-layout/iron-flex-layout-classes.js';
import '../controls/settings_toggle_button.js';
import '../prefs/prefs.js';
import '../site_settings/settings_category_default_radio_group.js';
import '../site_settings/site_data_details_subpage.js';
import '../settings_page/settings_animated_pages.js';
import '../settings_page/settings_subpage.js';
import '../settings_shared_css.js';
import {assert} from 'chrome://resources/js/assert.m.js';
import {focusWithoutInk} from 'chrome://resources/js/cr/ui/focus_without_ink.m.js';
import {I18nMixin, I18nMixinInterface} from 'chrome://resources/js/i18n_mixin.js';
import {WebUIListenerMixin, WebUIListenerMixinInterface} from 'chrome://resources/js/web_ui_listener_mixin.js';
import {html, PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {BaseMixin} from '../base_mixin.js';
import {SettingsToggleButtonElement} from '../controls/settings_toggle_button.js';
import {HatsBrowserProxyImpl, TrustSafetyInteraction} from '../hats_browser_proxy.js';
import {loadTimeData} from '../i18n_setup.js';
import {MetricsBrowserProxy, MetricsBrowserProxyImpl, PrivacyElementInteractions} from '../metrics_browser_proxy.js';
import {PrefsMixin, PrefsMixinInterface} from '../prefs/prefs_mixin.js';
import {routes} from '../route.js';
import {RouteObserverMixin, RouteObserverMixinInterface, Router} from '../router.js';
import {ChooserType, ContentSettingsTypes, CookieControlsMode, NotificationSetting} from '../site_settings/constants.js';
import {SiteSettingsPrefsBrowserProxyImpl} from '../site_settings/site_settings_prefs_browser_proxy.js';
import {PrivacyPageBrowserProxy, PrivacyPageBrowserProxyImpl} from './privacy_page_browser_proxy.js';
// Status reported by the browser for the block-autoplay toggle.
type BlockAutoplayStatus = {
  enabled: boolean,
  pref: chrome.settingsPrivate.PrefObject,
};
// Maps a route path to the selector (or focus callback) used to restore
// focus when returning from that subpage.
type FocusConfig = Map<string, (string|(() => void))>;
// Mixin composition for the element; the cast spells out the combined
// interface because the mixin chain's inferred type is otherwise opaque.
const SettingsPrivacyPageElementBase =
    RouteObserverMixin(WebUIListenerMixin(
        I18nMixin(PrefsMixin(BaseMixin(PolymerElement))))) as {
      new ():
          PolymerElement & I18nMixinInterface & WebUIListenerMixinInterface &
          PrefsMixinInterface & RouteObserverMixinInterface
    };
/**
 * 'settings-privacy-page' hosts the privacy-related settings: clear
 * browsing data, cookies, security, site permissions, and the privacy
 * sandbox / privacy review entry points.
 */
export class SettingsPrivacyPageElement extends SettingsPrivacyPageElementBase {
  static get is() {
    return 'settings-privacy-page';
  }
  static get template() {
    return html`{__html_template__}`;
  }
  static get properties() {
    return {
      /**
       * Preferences state.
       */
      prefs: {
        type: Object,
        notify: true,
      },
      isGuest_: {
        type: Boolean,
        value() {
          return loadTimeData.getBoolean('isGuest');
        }
      },
      showClearBrowsingDataDialog_: Boolean,
      enableSafeBrowsingSubresourceFilter_: {
        type: Boolean,
        value() {
          return loadTimeData.getBoolean('enableSafeBrowsingSubresourceFilter');
        }
      },
      cookieSettingDescription_: String,
      enableBlockAutoplayContentSetting_: {
        type: Boolean,
        value() {
          return loadTimeData.getBoolean('enableBlockAutoplayContentSetting');
        }
      },
      blockAutoplayStatus_: {
        type: Object,
        value() {
          return {};
        }
      },
      enablePaymentHandlerContentSetting_: {
        type: Boolean,
        value() {
          return loadTimeData.getBoolean('enablePaymentHandlerContentSetting');
        }
      },
      enableExperimentalWebPlatformFeatures_: {
        type: Boolean,
        value() {
          return loadTimeData.getBoolean(
              'enableExperimentalWebPlatformFeatures');
        },
      },
      enableSecurityKeysSubpage_: {
        type: Boolean,
        readOnly: true,
        value() {
          return loadTimeData.getBoolean('enableSecurityKeysSubpage');
        }
      },
      enableQuietNotificationPromptsSetting_: {
        type: Boolean,
        value: () =>
            loadTimeData.getBoolean('enableQuietNotificationPromptsSetting'),
      },
      enableWebBluetoothNewPermissionsBackend_: {
        type: Boolean,
        value: () =>
            loadTimeData.getBoolean('enableWebBluetoothNewPermissionsBackend'),
      },
      enablePrivacyReview_: {
        type: Boolean,
        value: () => loadTimeData.getBoolean('privacyReviewEnabled'),
      },
      // Maps subpage routes back to the link rows that opened them, so focus
      // can be restored when the user navigates back.
      focusConfig_: {
        type: Object,
        value() {
          const map = new Map();
          if (routes.SECURITY) {
            map.set(routes.SECURITY.path, '#securityLinkRow');
          }
          if (routes.COOKIES) {
            map.set(
                `${routes.COOKIES.path}_${routes.PRIVACY.path}`,
                '#cookiesLinkRow');
            map.set(
                `${routes.COOKIES.path}_${routes.BASIC.path}`,
                '#cookiesLinkRow');
          }
          if (routes.SITE_SETTINGS) {
            map.set(routes.SITE_SETTINGS.path, '#permissionsLinkRow');
          }
          if (routes.PRIVACY_REVIEW) {
            map.set(routes.PRIVACY_REVIEW.path, '#privacyReviewLinkRow');
          }
          return map;
        },
      },
      /**
       * Expose NotificationSetting enum to HTML bindings.
       */
      notificationSettingEnum_: {
        type: Object,
        value: NotificationSetting,
      },
      searchFilter_: String,
      siteDataFilter_: String,
      /**
       * Expose ContentSettingsTypes enum to HTML bindings.
       */
      contentSettingsTypesEnum_: {
        type: Object,
        value: ContentSettingsTypes,
      },
      /**
       * Expose ChooserType enum to HTML bindings.
       */
      chooserTypeEnum_: {
        type: Object,
        value: ChooserType,
      },
    };
  }
  private isGuest_: boolean;
  private showClearBrowsingDataDialog_: boolean;
  private enableSafeBrowsingSubresourceFilter_: boolean;
  private cookieSettingDescription_: string;
  private enableBlockAutoplayContentSetting_: boolean;
  private blockAutoplayStatus_: BlockAutoplayStatus;
  private enablePaymentHandlerContentSetting_: boolean;
  private enableExperimentalWebPlatformFeatures_: boolean;
  private enableSecurityKeysSubpage_: boolean;
  private enableQuietNotificationPromptsSetting_: boolean;
  private enableWebBluetoothNewPermissionsBackend_: boolean;
  private enablePrivacyReview_: boolean;
  private focusConfig_: FocusConfig;
  private searchFilter_: string;
  private siteDataFilter_: string;
  private browserProxy_: PrivacyPageBrowserProxy =
      PrivacyPageBrowserProxyImpl.getInstance();
  private metricsBrowserProxy_: MetricsBrowserProxy =
      MetricsBrowserProxyImpl.getInstance();
  ready() {
    super.ready();
    // Seed the autoplay toggle with a disabled placeholder until the browser
    // reports the real status via the WebUI listener below.
    this.onBlockAutoplayStatusChanged_({
      pref: {
        key: '',
        type: chrome.settingsPrivate.PrefType.BOOLEAN,
        value: false,
      },
      enabled: false
    });
    this.addWebUIListener(
        'onBlockAutoplayStatusChanged',
        (status: BlockAutoplayStatus) =>
            this.onBlockAutoplayStatusChanged_(status));
    SiteSettingsPrefsBrowserProxyImpl.getInstance()
        .getCookieSettingDescription()
        .then(
            (description: string) => this.cookieSettingDescription_ =
                description);
    this.addWebUIListener(
        'cookieSettingDescriptionChanged',
        (description: string) => this.cookieSettingDescription_ = description);
  }
  // RouteObserverMixin override: the clear-browsing-data dialog is only
  // shown while on its dedicated route.
  currentRouteChanged() {
    this.showClearBrowsingDataDialog_ =
        Router.getInstance().getCurrentRoute() === routes.CLEAR_BROWSER_DATA;
  }
  /**
   * Called when the block autoplay status changes.
   */
  private onBlockAutoplayStatusChanged_(autoplayStatus: BlockAutoplayStatus) {
    this.blockAutoplayStatus_ = autoplayStatus;
  }
  /**
   * Updates the block autoplay pref when the toggle is changed.
   */
  private onBlockAutoplayToggleChange_(event: Event) {
    const target = event.target as SettingsToggleButtonElement;
    this.browserProxy_.setBlockAutoplayEnabled(target.checked);
  }
  /**
   * This is a workaround to connect the remove all button to the subpage.
   */
  private onRemoveAllCookiesFromSite_() {
    // Intentionally not casting to SiteDataDetailsSubpageElement, as this would
    // require importing site_data_details_subpage.js and would endup in the
    // main JS bundle.
    const node = this.shadowRoot!.querySelector('site-data-details-subpage');
    if (node) {
      node.removeAll();
    }
  }
  private onClearBrowsingDataTap_() {
    this.interactedWithPage_();
    Router.getInstance().navigateTo(routes.CLEAR_BROWSER_DATA);
  }
  private onCookiesClick_() {
    this.interactedWithPage_();
    Router.getInstance().navigateTo(routes.COOKIES);
  }
  private onDialogClosed_() {
    Router.getInstance().navigateTo(assert(routes.CLEAR_BROWSER_DATA.parent!));
    setTimeout(() => {
      // Focus after a timeout to ensure any a11y messages get read before
      // screen readers read out the newly focused element.
      focusWithoutInk(
          assert(this.shadowRoot!.querySelector('#clearBrowsingData')!));
    });
  }
  private onPermissionsPageClick_() {
    this.interactedWithPage_();
    Router.getInstance().navigateTo(routes.SITE_SETTINGS);
  }
  private onSecurityPageClick_() {
    this.interactedWithPage_();
    this.metricsBrowserProxy_.recordAction(
        'SafeBrowsing.Settings.ShowedFromParentSettings');
    Router.getInstance().navigateTo(routes.SECURITY);
  }
  private onPrivacySandboxClick_() {
    this.metricsBrowserProxy_.recordAction(
        'Settings.PrivacySandbox.OpenedFromSettingsParent');
    // Create a MouseEvent directly to avoid Polymer failing to synthesise a
    // click event if this function was called in response to a touch event.
    // See crbug.com/1253883 for details.
    // TODO(crbug/1159942): Replace this with an ordinary OpenWindowProxy call.
    this.shadowRoot!.querySelector<HTMLAnchorElement>('#privacySandboxLink')!
        .dispatchEvent(new MouseEvent('click'));
  }
  private onPrivacyReviewClick_() {
    // TODO(crbug/1215630): Implement metrics.
    Router.getInstance().navigateTo(
        routes.PRIVACY_REVIEW, /* dynamicParams */ undefined,
        /* removeSearch */ true);
  }
  private interactedWithPage_() {
    HatsBrowserProxyImpl.getInstance().trustSafetyInteractionOccurred(
        TrustSafetyInteraction.USED_PRIVACY_CARD);
  }
  private computePrivacySandboxSublabel_(): string {
    return this.getPref('privacy_sandbox.apis_enabled').value ?
        this.i18n('privacySandboxTrialsEnabled') :
        this.i18n('privacySandboxTrialsDisabled');
  }
}
// Register the custom element under the tag returned by `is`.
customElements.define(
    SettingsPrivacyPageElement.is, SettingsPrivacyPageElement);
import { Match, MatchEngine } from 'dimensions-ai';
import { DEFAULT_CONFIGS } from './defaults';
import { generateGame } from './Game/gen';
import { LuxMatchState, SerializedState } from './types';
import {
Action,
SpawnWorkerAction,
SpawnCityAction,
SpawnCartAction,
ResearchAction,
TransferAction,
MoveAction,
PillageAction,
} from './Actions';
import { Game } from './Game';
import { Unit } from './Unit';
import seedrandom from 'seedrandom';
import { deepCopy, deepMerge, sleep } from './utils';
import { Replay } from './Replay';
import { Cell } from './GameMap/cell';
import { GameMap } from './GameMap';
import { Resource } from './Resource';
import { KaggleObservation, parseKaggleObs } from './Replay/parseKaggleObs';
export class LuxDesignLogic {
  /**
   * Initialization step of each match: merges default and match configs,
   * seeds the RNG (from configs.seed if given, otherwise a fresh random
   * seed that is stored back into the configs), generates the game and its
   * replay, then sends every agent its id, the map dimensions, the initial
   * game state, and finally the `D_DONE` sentinel.
   */
  static async initialize(match: Match): Promise<void> {
    // initialize with default state and configurations and default RNG
    const randseed = Math.floor(Math.random() * 1e9);
    const state: LuxMatchState = {
      configs: deepCopy(DEFAULT_CONFIGS),
      game: null,
      rng: seedrandom(`${randseed}`),
      profile: null,
    };
    state.configs = deepMerge(state.configs, match.configs);
    if (state.configs.runProfiler) {
      state.profile = {
        updateStage: [],
        dataTransfer: [],
      };
    }
    if (state.configs.seed !== undefined) {
      state.rng = seedrandom(`${state.configs.seed}`);
    } else {
      state.configs.seed = randseed;
    }
    const forcedWidth = state.configs.width;
    const forcedHeight = state.configs.height;
    const game = generateGame(state.configs);
    state.game = game;
    game.replay = new Replay(
      match,
      state.configs.compressReplay,
      state.configs.statefulReplay,
      state.configs.out
    );
    game.replay.data.seed = state.configs.seed;
    game.replay.data.width = forcedWidth;
    game.replay.data.height = forcedHeight;
    game.replay.data.mapType = state.configs.mapType;
    match.log.detail(state.configs);
    // store the state into the match so it can be used again in `update` and `getResults`
    match.state = state;
    game.map.sortResourcesDeterministically();
    if (game.replay) {
      game.replay.writeTeams(match.agents);
      if (game.replay.statefulReplay) {
        game.replay.writeState(game);
      }
    }
    // send each agent their id
    for (let i = 0; i < match.agents.length; i++) {
      const agentID = match.agents[i].id;
      await match.send(`${agentID}`, agentID);
    }
    // send all agents the current map width and height
    // `width height` - width and height of the map
    await match.sendAll(`${state.game.map.width} ${state.game.map.height}`);
    await this.sendAllAgentsGameInformation(match);
    await match.sendAll('D_DONE');
  }
/**
 * Sends map information formatted as so
 *
 * `rp t points` - the number of research points team `t` has
 *
 * `r resource_type x y amount` - the amount of resource of that type at `(x, y)`
 * ...
 *
 * `u unit_type t unit_id x y cd w c u` - the unit on team `t` with id unit_id of type unit_type at `(x, y)` with cooldown `cd`,
 * and `w` `c` `u` units of wood, coal, uranium
 * ...
 *
 * `c t city_id f lk` - team `t`'s city with id city_id and fuel `f` and light upkeep `lk`
 * ...
 *
 * `ct t city_id x y cd` - team `t`'s city tile part of city with id city_id at `(x, y)` with cooldown `cd`
 * ...
 *
 *
 * `ccd x y cd` - road level of cell at (x, y)
 *
 */
static async sendAllAgentsGameInformation(match: Match): Promise<void> {
  let stime: number;
  const state: LuxMatchState = match.state;
  const game = state.game;
  if (game.configs.runProfiler) {
    stime = new Date().valueOf();
  }
  const map = game.map;
  const promises: Array<Promise<boolean>> = [];
  const teams = [Unit.TEAM.A, Unit.TEAM.B];
  // Queue `msg` for delivery to every agent that is still alive. Terminated
  // agents are skipped so we never write to a dead process. All six message
  // categories below share this exact send pattern.
  const sendToActiveAgents = (msg: string): void => {
    match.agents.forEach((agent) => {
      if (!agent.isTerminated()) {
        promises.push(match.send(msg, agent));
      }
    });
  };
  // send research points
  teams.forEach((team) => {
    const pts = game.state.teamStates[team].researchPoints;
    sendToActiveAgents(`rp ${team} ${pts}`);
  });
  // send resource information
  map.resources.forEach((cell) => {
    sendToActiveAgents(
      `r ${cell.resource.type} ${cell.pos.x} ${cell.pos.y} ${cell.resource.amount}`
    );
  });
  // send unit information
  teams.forEach((team) => {
    const units = game.getTeamsUnits(team);
    units.forEach((unit) => {
      sendToActiveAgents(
        `u ${unit.type} ${team} ${unit.id} ${unit.pos.x} ${unit.pos.y} ${unit.cooldown} ${unit.cargo.wood} ${unit.cargo.coal} ${unit.cargo.uranium}`
      );
    });
  });
  // send city information
  game.cities.forEach((city) => {
    sendToActiveAgents(
      `c ${city.team} ${city.id} ${city.fuel} ${city.getLightUpkeep()}`
    );
  });
  // send city tile information
  game.cities.forEach((city) => {
    city.citycells.forEach((cell) => {
      sendToActiveAgents(
        `ct ${city.team} ${city.id} ${cell.pos.x} ${cell.pos.y} ${cell.citytile.cooldown}`
      );
    });
  });
  // send road info in the form of cooldown discounts of cells
  for (let y = 0; y < game.map.height; y++) {
    for (let x = 0; x < game.map.width; x++) {
      const cd = game.map.getCell(x, y).getRoad();
      // ignore cooldowns of 0
      if (cd !== 0) {
        sendToActiveAgents(`ccd ${x} ${y} ${cd}`);
      }
    }
  }
  await Promise.all(promises);
  if (game.configs.runProfiler) {
    const etime = new Date().valueOf();
    state.profile.dataTransfer.push(etime - stime);
  }
}
// Update step of each match, called whenever the match moves forward by a single unit in time (1 timeStep).
// Validates incoming agent commands, executes the turn (city tiles, units, resources,
// night upkeep), advances the turn counter, and sends the new state to all agents.
static async update(
  match: Match,
  commands: Array<MatchEngine.Command>
): Promise<Match.Status> {
  const state: LuxMatchState = match.state;
  const game = state.game;
  let stime: number;
  if (game.configs.runProfiler) {
    stime = new Date().valueOf();
  }
  match.log.detail('Processing turn ' + game.state.turn);
  if (!game.configs.debugAnnotations) {
    // filter out all debug commands (any command whose action word starts with 'd')
    commands = commands.filter((cmd) => {
      const strs = cmd.command.split(' ');
      const action = strs[0];
      if (action[0] === 'd') {
        return false;
      }
      return true;
    });
  }
  if (game.replay) {
    game.replay.data.allCommands.push(commands);
  }
  // loop over commands and validate and map into internal action representations
  const actionsMap: Map<Game.ACTIONS, Array<Action>> = new Map();
  Object.values(Game.ACTIONS).forEach((val) => {
    actionsMap.set(val, []);
  });
  const accumulatedActionStats = game._genInitialAccumulatedActionStats();
  for (let i = 0; i < commands.length; i++) {
    // get the command and the agent that issued it and handle appropriately
    try {
      const action = game.validateCommand(
        commands[i],
        accumulatedActionStats
      );
      if (action != null) {
        // TODO: this might be slow, depends on its optimized and compiled
        const newactionArray = [...actionsMap.get(action.action), action];
        actionsMap.set(action.action, newactionArray);
      }
    } catch (err) {
      // invalid commands are logged and dropped; they never abort the match
      match.log.warn(`${err.message}`);
    }
  }
  // give units and city tiles their validated actions to use
  actionsMap
    .get(Game.ACTIONS.BUILD_CITY)
    .forEach((action: SpawnCityAction) => {
      game.getUnit(action.team, action.unitid).giveAction(action);
    });
  actionsMap
    .get(Game.ACTIONS.BUILD_WORKER)
    .forEach((action: SpawnWorkerAction) => {
      const citytile = game.map.getCell(action.x, action.y).citytile;
      citytile.giveAction(action);
    });
  actionsMap
    .get(Game.ACTIONS.BUILD_CART)
    .forEach((action: SpawnCartAction) => {
      const citytile = game.map.getCell(action.x, action.y).citytile;
      citytile.giveAction(action);
    });
  actionsMap.get(Game.ACTIONS.PILLAGE).forEach((action: PillageAction) => {
    game.getUnit(action.team, action.unitid).giveAction(action);
  });
  actionsMap.get(Game.ACTIONS.RESEARCH).forEach((action: ResearchAction) => {
    const citytile = game.map.getCell(action.x, action.y).citytile;
    citytile.giveAction(action);
  });
  actionsMap.get(Game.ACTIONS.TRANSFER).forEach((action: TransferAction) => {
    game.getUnit(action.team, action.srcID).giveAction(action);
  });
  // resolve movement collisions first; only surviving moves are handed out
  const prunedMoveActions = game.handleMovementActions(
    actionsMap.get(Game.ACTIONS.MOVE) as Array<MoveAction>,
    match
  );
  prunedMoveActions.forEach((action) => {
    // if direction is center, ignore it
    if (action.direction !== Game.DIRECTIONS.CENTER) {
      game.getUnit(action.team, action.unitid).giveAction(action);
    }
  });
  // now we go through every actionable entity and execute actions
  game.cities.forEach((city) => {
    city.citycells.forEach((cellWithCityTile) => {
      try {
        cellWithCityTile.citytile.handleTurn(game);
      } catch (err) {
        match.throw(cellWithCityTile.citytile.team, err);
      }
    });
  });
  const teams = [Unit.TEAM.A, Unit.TEAM.B];
  for (const team of teams) {
    game.state.teamStates[team].units.forEach((unit) => {
      try {
        unit.handleTurn(game);
      } catch (err) {
        match.log.warn(`${err.message}`);
      }
    });
  }
  // distribute all resources in order of decreasing fuel efficiency
  game.distributeAllResources();
  // now we make all units with cargo drop all resources on the city they are standing on
  for (const team of teams) {
    game.state.teamStates[team].units.forEach((unit) => {
      game.handleResourceDeposit(unit);
    });
  }
  if (game.isNight()) {
    this.handleNight(state);
  }
  // remove resources that are depleted from map
  const newResourcesMap: Array<Cell> = [];
  for (let i = 0; i < game.map.resources.length; i++) {
    const cell = game.map.resources[i];
    if (cell.resource.amount > 0) {
      newResourcesMap.push(cell);
    }
  }
  game.map.resources = newResourcesMap;
  // regenerate forests
  game.regenerateTrees();
  if (state.configs.debug) {
    await this.debugViewer(game);
  }
  const matchOver = this.matchOver(match);
  game.state.turn++;
  // store state
  // BUGFIX: guard on game.replay before dereferencing it, consistent with the
  // other replay accesses in this method — replay may be disabled/undefined
  if (game.replay && game.replay.statefulReplay) {
    game.replay.writeState(game);
  }
  game.runCooldowns();
  /** Agent Update Section */
  await this.sendAllAgentsGameInformation(match);
  // tell all agents updates are done
  const donemsgs: Promise<boolean>[] = [];
  match.agents.forEach((agent) => {
    if (!agent.isTerminated()) {
      donemsgs.push(match.send('D_DONE', agent));
    }
  })
  await Promise.all(donemsgs);
  if (matchOver) {
    if (game.replay) {
      game.replay.writeOut(this.getResults(match));
    }
    return 'finished' as Match.Status.FINISHED;
  }
  if (game.configs.runProfiler) {
    const etime = new Date().valueOf();
    state.profile.updateStage.push(etime - stime);
  }
  match.log.detail('Beginning turn ' + game.state.turn);
  // NOTE(review): no explicit return here — presumably the framework treats
  // an undefined status as "match continues"; confirm against Match.Status usage
}
/**
 * Renders a console view of the current game state (map, turn, team stats,
 * cities) and pauses for the configured debug delay.
 */
static async debugViewer(game: Game): Promise<void> {
  console.clear();
  console.log(game.map.getMapString());
  console.log(`Turn: ${game.state.turn}`);
  // print each team's research points and unit count, colored per team
  for (const team of [Unit.TEAM.A, Unit.TEAM.B]) {
    const teamstate = game.state.teamStates[team];
    const msg = `RP: ${teamstate.researchPoints} | Units: ${teamstate.units.size}`;
    console.log(team === Unit.TEAM.A ? msg.cyan : msg.red);
  }
  // print each city's fuel and size, colored by owning team (team 0 = cyan)
  game.cities.forEach((city) => {
    const iden =
      city.team === 0 ? `City ${city.id}`.cyan : `City ${city.id}`.red;
    console.log(
      `${iden} light: ${city.fuel} - size: ${city.citycells.length}`
    );
  });
  await sleep(game.configs.debugDelay);
}
/**
 * Determine if match is over or not
 * @param match - the match to check
 * @returns true when the final turn has been reached or at least one team has
 *          no units and no cities left; false otherwise
 */
static matchOver(match: Match): boolean {
  const state: Readonly<LuxMatchState> = match.state;
  const game = state.game;
  if (game.state.turn === state.configs.parameters.MAX_DAYS - 1) {
    return true;
  }
  // over if at least one team has no units left or city tiles
  const teams = [Unit.TEAM.A, Unit.TEAM.B];
  const cityCount = [0, 0];
  game.cities.forEach((city) => {
    cityCount[city.team] += 1;
  });
  for (const team of teams) {
    if (game.getTeamsUnits(team).size + cityCount[team] === 0) {
      return true;
    }
  }
  // BUGFIX: previously fell off the end and returned undefined despite the
  // declared boolean return type; return false explicitly
  return false;
}
/**
 * Handle nightfall and update state accordingly: each city pays its light
 * upkeep in fuel (or is destroyed if it cannot afford it), and each unit not
 * standing on a city tile must spend fuel to survive.
 * @param state
 */
static handleNight(state: LuxMatchState): void {
  const game = state.game;
  game.cities.forEach((city) => {
    // compute upkeep once instead of calling getLightUpkeep() twice per city
    const upkeep = city.getLightUpkeep();
    // if city does not have enough fuel, destroy it
    // TODO, probably add this event to replay
    if (city.fuel < upkeep) {
      game.destroyCity(city.id);
    } else {
      city.fuel -= upkeep;
    }
  });
  [Unit.TEAM.A, Unit.TEAM.B].forEach((team) => {
    game.state.teamStates[team].units.forEach((unit) => {
      // TODO: add condition for different light upkeep for units stacked on a city.
      if (!game.map.getCellByPos(unit.pos).isCityTile()) {
        if (!unit.spendFuelToSurvive()) {
          // delete unit
          game.destroyUnit(unit.team, unit.id);
        }
      }
    });
  });
}
/**
 * Compute the final ranking of the match.
 * Tiebreakers in order: city tile count, then unit count. If both are equal
 * the match is a draw and both teams receive rank 1; otherwise the winner is
 * rank 1 and the loser rank 2. (Fuel-generation / random tiebreakers from an
 * earlier revision were intentionally disabled.)
 */
static getResults(match: Match): any {
  const state: LuxMatchState = match.state;
  const game = state.game;
  let winningTeam = Unit.TEAM.A;
  let losingTeam = Unit.TEAM.B;
  let isTie = false;
  // first tiebreaker: total city tiles owned
  const cityTileCount = [0, 0];
  game.cities.forEach((city) => {
    cityTileCount[city.team] += city.citycells.length;
  });
  if (cityTileCount[Unit.TEAM.A] === cityTileCount[Unit.TEAM.B]) {
    // second tiebreaker: number of units owned
    const sizeA = game.getTeamsUnits(Unit.TEAM.A).size;
    const sizeB = game.getTeamsUnits(Unit.TEAM.B).size;
    if (sizeA === sizeB) {
      isTie = true;
    } else if (sizeA < sizeB) {
      winningTeam = Unit.TEAM.B;
      losingTeam = Unit.TEAM.A;
    }
  } else if (cityTileCount[Unit.TEAM.A] < cityTileCount[Unit.TEAM.B]) {
    winningTeam = Unit.TEAM.B;
    losingTeam = Unit.TEAM.A;
  }
  const results = {
    ranks: [
      { rank: 1, agentID: winningTeam },
      { rank: isTie ? 1 : 2, agentID: losingTeam },
    ],
    replayFile: null,
  };
  if (game.configs.storeReplay) {
    results.replayFile = game.replay.replayFilePath;
  }
  return results;
}
/**
 * Reset the match to a starting state and continue from there
 * @param serializedState - either a full SerializedState or a reduced Kaggle
 *                          observation (detected by its `updates` field)
 *
 * DOES NOT change constants at all
 */
static reset(
  match: Match,
  serializedState: SerializedState | KaggleObservation
): void {
  /**
   * For this to work correctly, spawn all entities in first, then update any stats / global related things as
   * some spawning functions updates the stats or globals e.g. global ids
   */
  const state: LuxMatchState = match.state;
  const game = state.game;
  // type guard: Kaggle observations are identified by their `updates` list
  function isKaggleObs(
    obs: SerializedState | KaggleObservation
  ): obs is KaggleObservation {
    return (obs as KaggleObservation).updates !== undefined;
  }
  if (isKaggleObs(serializedState)) {
    // handle reduced states (e.g. kaggle outputs)
    serializedState = parseKaggleObs(serializedState);
  }
  // update map first; a fresh GameMap is built with the serialized dimensions
  const height = serializedState.map.length;
  const width = serializedState.map[0].length;
  const configs = {
    ...game.configs,
  };
  configs.width = width;
  configs.height = height;
  game.map = new GameMap(configs);
  // restore every cell's resource and road level
  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      const cellinfo = serializedState.map[y][x];
      if (cellinfo.resource) {
        game.map.addResource(
          x,
          y,
          cellinfo.resource.type as Resource.Types,
          cellinfo.resource.amount
        );
      }
      const cell = game.map.getCell(x, y);
      cell.road = cellinfo.road;
    }
  }
  // spawn in cities (tiles first, then the city-level fuel value)
  game.cities = new Map();
  for (const cityid of Object.keys(serializedState.cities)) {
    const cityinfo = serializedState.cities[cityid];
    cityinfo.cityCells.forEach((ct) => {
      const tile = game.spawnCityTile(cityinfo.team, ct.x, ct.y, cityinfo.id);
      tile.cooldown = ct.cooldown;
    });
    const city = game.cities.get(cityinfo.id);
    city.fuel = cityinfo.fuel;
  }
  // restore each team's research state and respawn its units
  const teams = [Unit.TEAM.A, Unit.TEAM.B];
  for (const team of teams) {
    game.state.teamStates[team].researchPoints =
      serializedState.teamStates[team].researchPoints;
    game.state.teamStates[team].researched = deepCopy(
      serializedState.teamStates[team].researched
    );
    game.state.teamStates[team].units.clear();
    for (const unitid of Object.keys(
      serializedState.teamStates[team].units
    )) {
      const unitinfo = serializedState.teamStates[team].units[unitid];
      let unit: Unit;
      if (unitinfo.type === Unit.Type.WORKER) {
        unit = game.spawnWorker(team, unitinfo.x, unitinfo.y, unitid);
      } else {
        unit = game.spawnCart(team, unitinfo.x, unitinfo.y, unitid);
      }
      unit.cargo = deepCopy(unitinfo.cargo);
      unit.cooldown = deepCopy(unitinfo.cooldown);
    }
  }
  // update globals last — spawning above mutates the global id counters
  game.state.turn = serializedState.turn;
  game.globalCityIDCount = serializedState.globalCityIDCount;
  game.globalUnitIDCount = serializedState.globalUnitIDCount;
  // game.stats = deepCopy(serializedState.stats);
  // without this, causes some bugs
  game.map.sortResourcesDeterministically();
}
} | the_stack |
import {isNullOrUndefined, isUndefined} from 'util';
import * as $ from 'jquery';
import * as _ from 'lodash';
import Split from 'split.js'
import {Observable} from 'rxjs';
import {Subscription} from 'rxjs/Subscription';
import {
AfterViewInit,
Component,
ElementRef,
EventEmitter,
HostListener,
Injector,
Input,
OnDestroy,
OnInit,
Output,
ViewChild
} from '@angular/core';
import {ActivatedRoute} from '@angular/router';
import {Alert} from '@common/util/alert.util';
import {StringUtil} from '@common/util/string.util';
import {CommonConstant} from '@common/constant/common.constant';
import {EventBroadcaster} from '@common/event/event.broadcaster';
import {AbstractPopupComponent} from '@common/component/abstract-popup.component';
import {PrDataflow} from '@domain/data-preparation/pr-dataflow';
import {DsType, Field, PrDataset, Rule} from '@domain/data-preparation/pr-dataset';
import {CreateSnapshotPopupComponent} from '../../../../component/create-snapshot-popup.component';
import {DataSnapshotDetailComponent} from '../../../../data-snapshot/data-snapshot-detail.component';
import {PreparationCommonUtil} from '../../../../util/preparation-common.util';
import {PreparationAlert} from '../../../../util/preparation-alert.util';
import {DataflowModelService} from '../../../service/dataflow.model.service';
import {DataflowService} from '../../../service/dataflow.service';
import {RuleListComponent} from './rule-list.component';
import {EditRuleComponent} from './edit-rule/edit-rule.component';
import {ExtendInputFormulaComponent} from './extend-input-formula.component';
import {EditRuleGridComponent} from './edit-rule-grid/edit-rule-grid.component';
import {MultipleRenamePopupComponent} from './multiple-rename-popup.component';
@Component({
selector: 'app-edit-dataflow-rule-2',
templateUrl: './edit-dataflow-rule-2.component.html',
styles: ['.ddp-type-selectbox ul.ddp-list-selectbox li a:hover {background:none}']
})
export class EditDataflowRule2Component extends AbstractPopupComponent implements OnInit, OnDestroy, AfterViewInit {
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
@ViewChild(EditRuleGridComponent)
private _editRuleGridComp: EditRuleGridComponent;
@ViewChild(MultipleRenamePopupComponent)
private multipleRenamePopupComponent: MultipleRenamePopupComponent;
@ViewChild(ExtendInputFormulaComponent)
private extendInputFormulaComponent: ExtendInputFormulaComponent;
@ViewChild(CreateSnapshotPopupComponent)
private createSnapshotPopup: CreateSnapshotPopupComponent;
@ViewChild('editRule')
private _editRuleComp: EditRuleComponent;
@ViewChild(RuleListComponent)
private ruleListComponent : RuleListComponent;
@ViewChild(DataSnapshotDetailComponent)
private dataSnapshotDetailComponent : DataSnapshotDetailComponent;
// Split.js handle for the resizable panes
private _split: any;
// re-entrancy guard for addRule()
private _isExecAddRule:boolean = false;
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Protected Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
public isInitDataLoaded: boolean = false;
@Input()
public dataflow: PrDataflow;
@Input()
public selectedDataSet: PrDataset;
@Output()
public closeEditRule = new EventEmitter();
@Output()
public changeDataset = new EventEmitter<PrDataset>();
@Input()
public step: string;
// search keyword for the command list
public commandSearchText: string = '';
// data needed when editing a join
public rightDataset: PrDataset;
// Layer show/hide
public isMultiColumnListShow: boolean = false;
public isRuleJoinModalShow: boolean = false;
public isRuleUnionModalShow: boolean = false;
public isOtherDatasetListShow: boolean = false;
public isCommandListShow: boolean = false;
// Rules
public ruleVO: Rule = new Rule();
public ruleList: any[] = [];
public isJumped: boolean = false;
public redoable: boolean = false;
public undoable: boolean = false;
// Flag for undo and redo, to stop users from multiple requests
public isRedoRunning: boolean = false;
public isUndoRunning: boolean = false;
// Add rule / editor or builder
public editorUseFlag: boolean = false;
public editorUseLabel: string = this.translateService.instant('msg.dp.btn.switch.editor');
// input cmd line (used when a rule is typed in directly)
public inputRuleCmd: string = '';
// whether the input currently has focus
public isFocus = false;
// tooltip show/hide
public isTooltipShow = false;
// Flag for mouse movement and keyboard navigation
public flag: boolean = false;
// holds the index of the rule being edited; prevents an append when the user
// switches to another rule while editing
public ruleNo: any;
// rule string used when editing Join / Union
public editJoinOrUnionRuleStr: string;
public commandList: any[];
public editColumnList = []; // list of columns to edit
public selectedColumns: string[] = []; // columns currently selected in the grid
public selectedRows: any = []; // rows currently selected in the grid
// tell if union is updating or just adding
public isUpdate: boolean = false;
public isForward: boolean; // location.forward
// Histogram
public charts: any = [];
// rule index currently in sync with the server
public serverSyncIndex: number;
// APPEND (add rule) / UPDATE (edit rule) / JUMP / PREPARE_UPDATE (jump in order to edit a rule) / DELETE
public opString: string = 'APPEND';
public isAggregationIncluded: boolean = false;
public scrollLeft: string;
// Wrangled datasets of this dataflow, with `current` flagged on the one being edited.
get filteredWrangledDatasets() {
  if (_.isNil(this.dsList) || this.dsList.length === 0) {
    return [];
  }
  return this.dsList
    .filter((ds) => 'WRANGLED' === ds.dsType.toString())
    .map((ds) => {
      ds.current = ds.dsId === this.dsId;
      return ds;
    });
}
// command List (search)
// Filters the command list by the current search text. English text matches
// the command name; other text matches the localized `command_h` tokens.
get filteredCommandList() {
  let commandList = this.commandList;
  // FIX: the old local was named `isSearchTextEmpty` although
  // StringUtil.isNotEmpty returns true when text IS present — renamed
  const hasSearchText = StringUtil.isNotEmpty(this.commandSearchText);
  const enCheckReg = /^[A-Za-z]+$/;
  // Check Search Text
  if (hasSearchText) {
    commandList = commandList.filter((item) => {
      // language(en or ko) check
      if(enCheckReg.test(this.commandSearchText)) {
        return item.command.toLowerCase().indexOf(this.commandSearchText.toLowerCase()) > -1;
      } else {
        return item.command_h.some(v=> v.indexOf(this.commandSearchText) > -1 );
      }
    });
  }
  return commandList;
}
// dataflow / dataset identifiers from the route, plus the dataflow's datasets
public dfId: string;
public dsId: string;
public dsName: string;
public dsList: PrDataset[];
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Constructor
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
// constructor: reads dsId/dfId from the route and disables the unload-confirm dialog
constructor(private dataflowService: DataflowService,
            private dataflowModelService: DataflowModelService,
            private broadCaster: EventBroadcaster,
            private route: ActivatedRoute,
            protected elementRef: ElementRef,
            protected injector: Injector) {
  super(elementRef, injector);
  this.route.params.subscribe((params) => {
    this.dsId = params['dsId'];
    this.dfId = params['dfId'];
  });
  this.useUnloadConfirm = false;
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Override Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
// Route deactivation guard; returns the current unload-confirm flag.
public canDeactive(): Observable<boolean> | boolean {
  return this.useUnloadConfirm;
}
// Init: registers subscriptions (history pinning, snapshot-created event,
// field layer show/hide) and loads the dataflow/dataset.
public ngOnInit() {
  // Init
  super.ngOnInit();
  // on browser back (popstate), immediately navigate forward again —
  // isForward distinguishes the user's back press from our own forward()
  this.subscriptions.push(
    this.location.subscribe((_popState) => {
      if( this.isForward !== true ) {
        this.isForward = true;
        this.location.forward();
      } else {
        this.isForward = false;
      }
    }) as Subscription
  );
  // used when datasource snapshot creation completes
  this.subscriptions.push(
    this.broadCaster.on<any>('CREATED_DATASOURCE_SNAPSHOT').subscribe(() => {
      this.useUnloadConfirm = true;
    })
  );
  // event for showing/hiding the field (column) layer
  this.subscriptions.push(
    this.broadCaster.on<any>('EDIT_RULE_SHOW_HIDE_LAYER').subscribe((data: { id : string, isShow : boolean }) => {
      if (data.id === 'toggleList') {
        this.isMultiColumnListShow = data.isShow;
      } else {
        this.isMultiColumnListShow = data.isShow;
        this.isCommandListShow = false;
      }
      // adjust scroll position so the layer lines up with the rule contents
      if (this.isMultiColumnListShow) {
        const left = $('.ddp-wrap-rulecontents')[0].scrollLeft;
        this.scrollLeft = '-' + left + 'px';
      } else {
        this.scrollLeft = '';
      }
    })
  );
  this._initialiseValues();
  this._getDataflowAndDataset();
}
// After the view is ready, set up the Split.js resizable panes.
public ngAfterViewInit() {
  this._setSplit();
}
// Tear down subscriptions (via super) and destroy the Split.js instance.
public ngOnDestroy() {
  super.ngOnDestroy();
  this._destroySplit();
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Snapshot list refresh (tab 1 is the snapshot tab of the rule list panel)
 */
public initSnapshotList() {
  this.ruleListComponent.changeTab(1);
}
/**
 * Called when the create-snapshot popup closes.
 * @param data optional payload from the snapshot popup; when present, the
 *             multiple-rename popup is opened (hive rename flow)
 */
public snapshotCreateClose(data?) {
  // refresh the snapshot list if the snapshot tab is currently active
  if (this.ruleListComponent.tabNumber === 1) {
    this.ruleListComponent.getSnapshotList();
  }
  if (data) {
    const renameParams = {
      gridData: {data: null, fields: null},
      dsName: this.selectedDataSet.dsName,
      typeDesc: null,
      isFromSnapshot : true, dsId: this.dsId};
    this.multipleRenamePopupComponent.init(renameParams);
  }
}
/**
 * Snapshot detail popup open
 * @param data - snapshot info passed through to the detail component
 */
public getSnapshotDetail(data) {
  this.dataSnapshotDetailComponent.init(data, true);
} // function - getSnapshotDetail
/**
 * move to previous step (back to the dataflow detail page)
 */
public prev() {
  // save id and type for dataflow detail page
  this.dataflowModelService.setSelectedDsId(this.dsId);
  this.dataflowModelService.setSelectedDsType(DsType.WRANGLED);
  this.useUnloadConfirm = true;
  this.router.navigate([`/management/datapreparation/dataflow/${this.dfId}`]);
}
/**
 * open create snapshot popup for the current dataset;
 * stops the rule list's polling interval first
 */
public createSnapshot() {
  this.ruleListComponent.clearExistingInterval();
  this.createSnapshotPopup.init({
    id: this.dsId,
    name: this.dsName,
    isFromMainGrid: true,
  });
}
/**
 * Join configuration completed event.
 * @param $event join settings; `ruleInfo` present = confirmed, absent = cancelled
 */
public ruleJoinComplete($event) {
  const joinRule = $event.ruleInfo;
  if (joinRule) {
    // join confirmed: apply the generated rule
    this.applyRule(joinRule);
  } else {
    // join cancelled: re-sync the grid with the server state
    this.jump(this.serverSyncIndex);
  }
  this.isRuleJoinModalShow = false;
}
/**
 * Union configuration completed event.
 * @param $event union settings; `ruleInfo` present = confirmed, absent = cancelled
 */
public ruleUnionComplete($event) {
  const unionRule = $event.ruleInfo;
  if (unionRule) {
    // union confirmed: apply the generated rule
    this.applyRule(unionRule);
  } else {
    // union cancelled: re-sync the grid with the server state
    this.jump(this.serverSyncIndex);
  }
  this.isRuleUnionModalShow = false;
  this.isUpdate = false;
} // function - ruleUnionComplete
// top-left nav — toggles the list of the dataflow's other datasets
public showDatasets() {
  this.isOtherDatasetListShow = !this.isOtherDatasetListShow;
}
// switch editor: toggles between the rule builder and the raw text editor,
// updating the toggle button's label to name the *other* mode
public switchEditor() {
  this.editorUseFlag = !this.editorUseFlag;
  const labelKey = this.editorUseFlag
    ? 'msg.dp.btn.switch.builder'
    : 'msg.dp.btn.switch.editor';
  this.editorUseLabel = this.translateService.instant(labelKey);
}
// command list show: closes other rule layers, then opens the command list
public showCommandList() {
  // Close all opened select box from rule
  this.broadCaster.broadcast('EDIT_RULE_SHOW_HIDE_LAYER', { id : 'toggleList', isShow : false } );
  // move focus to the command search input (deferred so the element exists)
  setTimeout(() => $('#commandSearch').trigger('focus'));
  this.commandSearchText = '';
  this.isCommandListShow = true;
  this.initSelectedCommand(this.filteredCommandList);
  this.safelyDetectChanges();
}
/**
 * When command is selected from commandList
 * @param event - click event (propagation is stopped)
 * @param command - selected command item; no-op when null/undefined
 */
public selectCommand(event, command) {
  event.stopImmediatePropagation();
  // TODO : Added getting selected columns from grid because didn't show selected columns when command is changed on edit
  this.selectedColumns = this._editRuleGridComp.getSelectedColumns();
  this.ruleVO.cols = this.selectedColumns;
  if (isNullOrUndefined(command)) {
    return;
  }
  this.ruleVO.command = command.command;
  this.ruleVO.alias = command.alias;
  this.ruleVO.desc = command.desc;
  // reset search text and close the command layer
  this.commandSearchText = '';
  this.isCommandListShow = false;
  this.safelyDetectChanges();
  // map selected column uuids to their Field objects
  let selectedFields:Field[] = [];
  if( this.selectedColumns ) {
    selectedFields = this.selectedColumns.map( col => this.selectedDataSet.gridData.fields.find( field => field.uuid === col ) );
  }
  switch (this.ruleVO.command) {
    case 'setformat':
      // setformat only applies to TIMESTAMP columns
      const colDescs = this.selectedDataSet.gridResponse.colDescs.filter((item) => {
        return item.type === 'TIMESTAMP'
      });
      this._editRuleComp.setValue('dsId', this.dsId);
      this._editRuleComp.setValue('colTypes', colDescs);
      break;
    case 'settype':
      this._editRuleComp.setValue('dsId', this.dsId);
      this._editRuleComp.setValue('colTypes', this.selectedDataSet.gridResponse.colDescs);
      break;
    case 'join':
      this.rightDataset = new PrDataset();
      this.rightDataset.dsId = '';
      this.isRuleJoinModalShow = true;
      break;
    case 'union':
      this.editJoinOrUnionRuleStr = '';
      this.isRuleUnionModalShow = true;
      break;
  }
  // join/union open their own modals; every other command uses the inline rule component
  if (command.command !== 'join' && command.command !== 'union') {
    this._editRuleComp.init(this.selectedDataSet.gridData.fields, selectedFields);
  }
  this.initSelectedCommand(this.filteredCommandList);
}
/**
 * Rule cancel or initial setup: returns the panel to APPEND mode
 * @param data optional redo/undo state to re-initialise the buttons with
 */
public initRule(data?) {
  // default op is append
  this.opString = 'APPEND';
  // clear this.ruleNo so no rule is selected in the rule list
  this.ruleNo = null;
  this.ruleVO = new Rule();
  // this.inputRuleCmd = '';
  // re-initialise redo / undo button state
  if (data) this.initRedoUndo(data);
} // function - initRule
/**
 * Initialise redo and undo buttons from the server's string flags.
 * Values other than the strings 'true'/'false' leave the flags untouched,
 * exactly as the previous switch-without-default did.
 * @param data object carrying `redoable` / `undoable` as 'true' | 'false'
 */
public initRedoUndo(data) {
  if ('true' === data.redoable) {
    this.redoable = true;
  } else if ('false' === data.redoable) {
    this.redoable = false;
  }
  if ('true' === data.undoable) {
    this.undoable = true;
  } else if ('false' === data.undoable) {
    this.undoable = false;
  }
} // function - initRedoUndo
/**
 * Apply rule. (When Add button is clicked)
 * Builds the rule either from the rule-builder component or from the raw
 * editor text, rejects strings containing no known command, then applies it.
 */
public addRule() {
  // re-entrancy guard: ignore clicks while a previous add is still running
  if( this._isExecAddRule ) {
    this.editorUseFlag = false;
    return;
  }
  this._isExecAddRule = true;
  let rule: any = {};
  if (this.editorUseFlag === false) {
    // builder mode: a command must have been chosen first
    if (isUndefined(this.ruleVO['command']) || '' === this.ruleVO['command']) {
      Alert.warning(this.translateService.instant('msg.dp.alert.no.data'));
      this._isExecAddRule = false;
      return;
    }
    // get rule string from individual components
    rule = this._editRuleComp.getRuleData();
    if (isUndefined(rule)) {
      this._isExecAddRule = false;
      return;
    }
    // set param
    rule['op'] = this.opString;
    rule['ruleIdx'] = this.serverSyncIndex;
  } else { // Using editor
    if (this.inputRuleCmd === '') {
      Alert.warning(this.translateService.instant('msg.dp.alert.editor.warn'));
      this._isExecAddRule = false;
      return;
    }
    rule = {
      op: this.opString,
      ruleIdx: this.serverSyncIndex,
      ruleString: this.inputRuleCmd,
      uiRuleString: {isBuilder: false}
    };
  }
  if (!isUndefined(rule)) {
    // reject rules whose string mentions none of the known commands
    // NOTE(review): assumes rule.ruleString is always set by getRuleData() — TODO confirm
    let isErrorCommand : boolean = true;
    for(const ind in this.commandList) {
      if ( rule.ruleString.indexOf(this.commandList[ind].command) > -1 ) isErrorCommand = false;
    }
    if (isErrorCommand){
      this._isExecAddRule = false;
      Alert.error(this.translateService.instant('msg.dp.alert.command.error'));
      return;
    }
    this.applyRule(rule);
  }
} // function - addRule
/**
 * When editing, the existing expression is filled into each input field
 * @param rule - rule info to edit
 * @param gridData
 */
public setEditInfo(rule, gridData:any) {
  try {
    const jsonRuleString = JSON.parse(rule.jsonRuleString);
    // Set ruleString for editor
    this.inputRuleCmd = rule.ruleString;
    // If the rule was written in the raw editor, just switch to editor mode
    if (!jsonRuleString.isBuilder) {
      this.editorUseFlag = true;
      return;
    }
    // TODO : eventually remove ruleVO
    // this.initRule(); --> Check if this is necessary here
    this.ruleVO = rule['ruleVO'];
    ('' === this.ruleVO.command) && (this.ruleVO.command = this.ruleVO['name']);
    this.safelyDetectChanges();
    if (jsonRuleString.name === 'settype') {
      this._editRuleComp.setValue('dsId', this.dsId);
      this._editRuleComp.setValue('colTypes', this.selectedDataSet.gridResponse.colDescs);
    }
    if (jsonRuleString.name === 'setformat') {
      // setformat only applies to TIMESTAMP columns
      const colDescs = this.selectedDataSet.gridResponse.colDescs.filter((item) => {
        return item.type === 'TIMESTAMP'
      });
      this._editRuleComp.setValue('dsId', this.dsId);
      this._editRuleComp.setValue('colTypes', colDescs);
    }
    if (jsonRuleString.name === 'rename') {
      // multi-column rename is handled by its own popup
      if (jsonRuleString.col.length !== 1) {
        this.multipleRenamePopupComponent.init({
          gridData: _.cloneDeep(gridData),
          dsName: this.dsName,
          typeDesc: this.selectedDataSet.gridResponse.colDescs,
          editInfo: {ruleCurIdx: this.ruleVO['ruleNo'],
            cols: jsonRuleString.col,
            to: jsonRuleString.to}
        });
      }
    }
    if (jsonRuleString.name === 'join') {
      if (this.selectedDataSet.gridData.data.length > 0) {
        this.editJoinOrUnionRuleStr = rule['jsonRuleString'];
        this.setJoinEditInfo(rule);
      } else {
        Alert.warning('No rows to join');
      }
    }
    if (jsonRuleString.name === 'union') {
      if (this.selectedDataSet.gridData.data.length > 0) {
        this.editJoinOrUnionRuleStr = rule['jsonRuleString'];
        this.isUpdate = true;
        this.isRuleUnionModalShow = true;
      } else {
        Alert.warning('No rows to union');
      }
    }
    // union/join use modals; everything else initialises the inline rule component
    const ruleNames : string[] = ['union', 'join'];
    if (-1 === ruleNames.indexOf(jsonRuleString.name)) {
      this._editRuleComp.init(gridData.fields, [], {jsonRuleString : jsonRuleString});
    }
  } catch (e) {
    Alert.error(this.translateService.instant('msg.dp.alert.rule.edit.fail'));
  }
}
/**
 * When the edit button of a rule is clicked: jumps (PREPARE_UPDATE) to the
 * state just before the rule and loads its values into the editor.
 * @param editInfo
 */
public setEditData(editInfo) {
  this._editRuleGridComp.unSelectionAll('COL'); // unselect all columns in current grid
  this.ruleVO.command = ''; // remove currently selected rule component (fixes an error when editing 'set' while 'set' is already selected)
  // set current index (when editing subtract 1 from index)
  const ruleIdx = editInfo.ruleNo-1;
  // unset input focus; in IE clicking the edit button opens the tooltip box
  this.isFocus = false;
  this._setEditRuleInfo({op: 'PREPARE_UPDATE', ruleIdx: ruleIdx, count: 100, offset: 0})
    .then((data: { apiData: any, gridData: any }) => {
      if (data['error']) {
        const prepError = this.dataprepExceptionHandler(data['error']);
        PreparationAlert.output(prepError, this.translateService.instant(prepError.message));
        return;
      }
      this.setEditInfo(editInfo, data.gridData);
      this.opString = 'UPDATE';
      this.serverSyncIndex = ruleIdx+1;
      this.setRuleListColorWhenJumped(this.serverSyncIndex);
      this.setCancelBtnWhenEditMode(this.serverSyncIndex);
    });
} // function - setRuleVO
/**
 * Delete rule at the given index (resets edit mode first)
 * @param {number} ruleNo
 */
public deleteRule(ruleNo : number) {
  this.serverSyncIndex = ruleNo;
  this.refreshEditMode();
  this.applyRule({ op: 'DELETE', ruleIdx: this.serverSyncIndex, count:100 });
}
/**
 * Init edit mode: back to APPEND with a cleared editor in builder mode
 */
public refreshEditMode() {
  this.opString = 'APPEND';
  this.inputRuleCmd = '';
  this.editorUseFlag = false;
}
/**
 * Change to different dataset in same dataflow (full page navigation)
 * @param dataset {PrDataset}
 */
public changeWrangledDataset(dataset : PrDataset) {
  this.dataflowModelService.setSelectedDsId(this.dsId);
  this.dataflowModelService.setSelectedDsType(DsType.WRANGLED);
  window.location.href = CommonConstant.API_CONSTANT.BASE_URL +`management/datapreparation/dataflow/${this.dataflow.dfId}/rule/${dataset.dsId}` ;
}
/**
 * Redo or undo the last transformation.
 * Ignored when the corresponding operation is not available; otherwise the
 * running flag is raised to block duplicate requests.
 * @param {string} action 'UNDO' or 'REDO'
 */
public transformAction(action : string) {
  this.refreshEditMode();
  const rule = { op: action };
  if ('UNDO' === action) {
    if (!this.undoable) {
      return;
    }
    // setting unconditionally is equivalent to the old "only if false" check
    this.isUndoRunning = true;
  } else if ('REDO' === action) {
    if (!this.redoable) {
      return;
    }
    this.isRedoRunning = true;
  }
  this.applyRule(rule, 'UNDO' === action)
}
/**
 * Jump Action: re-render the grid at the state after rule `idx`
 * @param idx - from rule list
 */
public jump(idx: number) {
  // Change edit mode to false
  this.refreshEditMode();
  // clear all selected columns and rows
  this._editRuleGridComp.unSelectionAll();
  this.loadingShow();
  this.opString = 'JUMP';
  // Get grid of selected index
  this._setEditRuleInfo({op: this.opString, ruleIdx: idx, count: 100 }).then((data) => {
    if (data['error']) {
      const prepError = this.dataprepExceptionHandler(data['error']);
      PreparationAlert.output(prepError, this.translateService.instant(prepError.message));
      return;
    }
    // highlight the columns affected by the jumped-to rule
    data.apiData.gridResponse.interestedColNames.forEach(col => {
      if ('' !== this._editRuleGridComp.getColumnUUIDByColumnName(col)) {
        this._editRuleGridComp.selectColumn(this._editRuleGridComp.getColumnUUIDByColumnName(col), true);
      }
    });
    this.loadingHide();
    this.serverSyncIndex = data.apiData.ruleCurIdx;
    this.setRuleListColorWhenJumped(this.serverSyncIndex);
    this.ruleListComponent.selectedRuleIdx = this.serverSyncIndex;
  });
}
/**
 * Mark the rule at the given index as an "insert step" so the rule list
 * renders its cancel button there.
 * @param {number} idx rule-list index to flag
 */
public setInsertStep(idx : number ) {
this.ruleList[idx]['isInsertStep'] = true;
}
/**
 * When the insert-step button is clicked in the rule list,
 * jump to the selected index using the INITIAL op.
 * @param {number} ruleNo rule index to insert at
 */
public insertStep(ruleNo: number) {
  this.opString = 'INITIAL';
  this.jumpToInsertStep(ruleNo);
}
/**
 * Jump to the given index for step insertion (uses the current opString
 * as the request op, then switches back to APPEND).
 * @param idx rule index, from the rule list
 */
public jumpToInsertStep(idx: number) {
  // Clear all selected columns and rows first
  this._editRuleGridComp.unSelectionAll();
  const requestOp = this.opString;
  this.loadingShow();
  // Fetch the grid state for the requested index
  this._setEditRuleInfo({ op: requestOp, ruleIdx: idx, count: 100, offset: 0 }).then((data) => {
    if (data['error']) {
      const prepError = this.dataprepExceptionHandler(data['error']);
      PreparationAlert.output(prepError, this.translateService.instant(prepError.message));
      return;
    }
    // Highlight the columns affected by the rule at this index
    data.apiData.gridResponse.interestedColNames.forEach(col => {
      const uuid = this._editRuleGridComp.getColumnUUIDByColumnName(col);
      if ('' !== uuid) {
        this._editRuleGridComp.selectColumn(uuid, true);
      }
    });
    this.loadingHide();
    this.serverSyncIndex = data.apiData.ruleCurIdx;
    // Recolour the rule list for the server-reported index
    this.setRuleListColorWhenJumped(this.serverSyncIndex);
    this.opString = 'APPEND';
    this.setInsertStep(this.serverSyncIndex);
  });
}
/**
 * Hover handling for the command list: mark the entry as selected on
 * mouseover, clear all selections on mouseout.
 * @param event mouse event
 * @param index index of the hovered command
 */
public commandListHover(event, index) {
  if (this.flag) {
    return;
  }
  if (event.type === 'mouseover') {
    this.filteredCommandList[index].isHover = true;
  } else if (event.type === 'mouseout') {
    this.initSelectedCommand(this.filteredCommandList);
  }
} // function - commandListHover
/**
 * Keyboard navigation for the command select box (arrow up/down, enter).
 * @param event        keyboard event
 * @param currentList  list currently shown in the select box
 * @param clickHandler which select box is in use (e.g. 'command')
 */
public navigateWithKeyboardShortList(event, currentList, clickHandler) {
  const KEY_UP = 38;
  const KEY_DOWN = 40;
  const KEY_ENTER = 13;
  // Open the command select box when arrow up / arrow down is pressed
  if (event.keyCode === KEY_UP || event.keyCode === KEY_DOWN) {
    if (clickHandler === 'command' && !this.isCommandListShow) {
      this.isCommandListShow = true;
      setTimeout(() => $('#commandSearch').trigger('focus')); // focus the search input
    }
  }
  // Nothing to navigate when the list is empty
  if (currentList.length === 0) {
    return;
  }
  // Scroll step per row ('command' rows are taller)
  const height = clickHandler === 'command' ? 50 : 25;
  const lastIndex = currentList.length - 1;
  // Index of the currently hovered entry, -1 when none
  const idx = currentList.findIndex((command) => command.isHover);
  if (event.keyCode === KEY_UP) {
    let next;
    if (idx === -1) {
      // Nothing selected: wrap to the last entry
      next = lastIndex;
    } else if (idx === 0) {
      // First entry selected: wrap around to the last entry
      currentList[0].isHover = false;
      next = lastIndex;
    } else {
      currentList[idx].isHover = false;
      next = idx - 1;
    }
    currentList[next].isHover = true;
    $('.ddp-list-command').scrollTop(next * height);
  } else if (event.keyCode === KEY_DOWN) {
    if (idx === -1) {
      // Nothing selected: start at the first entry
      currentList[0].isHover = true;
    } else if (idx === lastIndex) {
      // Last entry selected: wrap around to the first
      currentList[0].isHover = true;
      currentList[lastIndex].isHover = false;
      $('.ddp-list-command').scrollTop(0);
    } else {
      currentList[idx].isHover = false;
      currentList[idx + 1].isHover = true;
      $('.ddp-list-command').scrollTop((idx + 1) * height);
    }
  }
  if (event.keyCode === KEY_ENTER) {
    // Apply the hovered command, refocus the input, and reset hover state
    const hoverIdx = currentList.findIndex((command) => command.isHover);
    this.selectCommand(event, currentList[hoverIdx]);
    $('[tabindex=1]').trigger('focus');
    this.initSelectedCommand(currentList);
  }
}
// noinspection JSMethodCanBeStatic
/**
 * Reset every entry's hover/selected flag in the given command list.
 */
public initSelectedCommand(list) {
  for (const item of list) {
    item.isHover = false;
  }
} // function - initSelectedCommand
/**
 * Recolour the rule list after a jump: entries at or before the jumped
 * index are shown as valid (isValid = false means "applied"), entries
 * after it as not-yet-applied.
 * @param idx index jumped to
 */
public setRuleListColorWhenJumped(idx : number) {
  this.ruleList.forEach((item, index) => {
    // Equivalent to !(index === idx || index < idx)
    item.isValid = index > idx;
  });
}
/**
 * Flag the rule at the given index as being edited so the rule list
 * renders its cancel button.
 * @param {number} idx rule-list index being edited
 */
public setCancelBtnWhenEditMode(idx : number) {
this.ruleList[idx]['isEditMode'] = true;
}
/**
 * Open the advanced formula input popup (used by set, keep, derive, delete).
 * Resolves the current editor value before opening so the popup starts
 * from what the user already typed.
 * @param data command name, current value, and whether a column is required
 */
public openPopupFormulaInput(data: {command : string, val : string, needCol?:boolean}) {
const fields: Field[] = this.selectedDataSet.gridData.fields;
// Pull the live value out of the edit-rule component before opening
data.val = this._editRuleComp.getValue( data.val );
this.extendInputFormulaComponent.open(fields, data);
}
/**
 * Apply the formula produced by the advanced formula popup back into
 * the edit-rule component.
 * @param {{command: string, formula: string}} data popup result
 */
public doneInputFormula(data: { command: string, formula: string }) {
this._editRuleComp.doneInputFormula(data);
}
/**
 * Open the multi-column rename popup. In UPDATE mode the popup also
 * receives the edit info of the rule being modified.
 */
public onMultiColumnRenameClick() {
  const options: any = {
    gridData: _.cloneDeep(this.selectedDataSet.gridData),
    dsName: this.dsName,
    typeDesc: this.selectedDataSet.gridResponse.colDescs
  };
  if ('UPDATE' === this.opString) {
    // Editing an existing rename rule: pass its current state along
    options.editInfo = {
      ruleCurIdx: this.ruleVO['ruleNo'],
      cols: this.ruleVO.cols,
      to: [this.ruleVO.to]
    };
  }
  this.multipleRenamePopupComponent.init(options);
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method - for EditRuleGrid
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Record the columns selected via a grid header click so rule editors
 * can operate on them.
 * @param {{id:string, isSelect:boolean, columns: any, fields: any}} data grid header selection payload
 */
public setRuleInfoFromGridHeader(data: { id: string, isSelect: boolean, columns: any, fields: any }) {
this.selectedColumns = data.columns;
} // function - setRuleInfoFromGridHeader
/**
 * Apply a rule chosen from the grid context menu.
 * When `data.more` is present, the matching edit-rule component is prepared
 * (command, selected columns, command-specific values); otherwise `data` is
 * treated as a complete rule payload and applied immediately.
 * @param data context-menu payload
 */
public onContextMenuInfo(data) {
if (data.more) {
this.ruleVO.command = data.more.command;
this.safelyDetectChanges();
// Grid fields whose uuid appears in the context menu's column list
const selCols = this.selectedDataSet.gridData.fields.filter( item => -1 < data.more.col.indexOf( item.uuid ) );
if (data.more.command === 'setformat') {
// Only TIMESTAMP columns are offered for formatting
const colDescs = this.selectedDataSet.gridResponse.colDescs.filter((item) => {
return item.type === 'TIMESTAMP'
});
this._editRuleComp.setValue('dsId', this.selectedDataSet.dsId);
this._editRuleComp.setValue('colTypes', colDescs);
}
if (data.more.command === 'move') {
this._editRuleComp.init(this.selectedDataSet.gridData.fields, selCols, {jsonRuleString : data.more});
}
if (data.more.command === 'set') {
if (data.more.contextMenu) {
this._editRuleComp.init(this.selectedDataSet.gridData.fields, selCols, {jsonRuleString : data.more});
} else {
this._editRuleComp.init(this.selectedDataSet.gridData.fields, selCols);
}
}
if (data.more.command === 'settype') {
this._editRuleComp.setValue('colTypes', this.selectedDataSet.gridResponse.colDescs);
this._editRuleComp.setValue('dsId', this.selectedDataSet.dsId);
this._editRuleComp.setValue('selectedType', data.more.type);
// Pre-select the dropdown entry matching the context-menu type
const idx = this.selectedDataSet.gridResponse.colDescs.findIndex((item) => {
return item.type === data.more.type.toUpperCase();
});
this._editRuleComp.setValue('defaultIndex', idx);
}
// 'move' and 'set' were already initialised above with their rule string;
// every other command is initialised here with fields/columns only.
const ruleNames : string[] = ['move', 'set'];
if (-1 === ruleNames.indexOf(data.more.command)) {
this._editRuleComp.init(this.selectedDataSet.gridData.fields, selCols);
}
} else {
// No 'more' payload: apply directly. In UPDATE mode the server expects
// the index of the rule being replaced (current index - 1).
data['ruleCurIdx'] = this.opString === 'UPDATE' ? this.serverSyncIndex-1 : this.serverSyncIndex;
data['op'] = this.opString === 'UPDATE' ? 'UPDATE' : 'APPEND';
this.applyRule(data);
}
} // function - applyRuleFromContextMenu
/**
 * Cancel edit mode and jump back to the current server-synced index.
 * Called when the cancel button is clicked.
 */
public jumpToCurrentIndex() {
  if (this.inputRuleCmd !== '') {
    // Empty the builder rule string
    this.inputRuleCmd = '';
  } else if (!this.editorUseFlag
    && (this.ruleVO.command === '' || isNullOrUndefined(this.ruleVO.command))) {
    // If no command is selected nothing happens
    return;
  }
  // Switch the button back to append mode
  this.opString = 'APPEND';
  // Unselect all columns
  this._editRuleGridComp.unSelectionAll();
  // Jump to the current index
  this.jump(this.serverSyncIndex);
  // TODO : check if necessary
  this.ruleVO.command = '';
  this.selectedColumns = [];
  this.editColumnList = [];
}
/**
 * Apply a multi-column rename, or cancel back to the current index when
 * the popup returned no changes.
 * @param data rename rule payload, or null/undefined when nothing changed
 */
public onRenameMultiColumns(data) {
  if (!isNullOrUndefined(data)) {
    this.applyRule(data);
  } else {
    // Nothing was changed: cancel the rename
    this.jumpToCurrentIndex();
  }
}
/**
 * Apply the current rule when Enter is pressed, unless a popup is open
 * or focus is outside the add-rule area.
 */
@HostListener('document:keydown.enter', ['$event'])
public onEnterKeydownHandler(event: KeyboardEvent) {
  if (isNullOrUndefined(this.ruleVO.command)) {
    return;
  }
  // Only react when focus is on the body or inside the add-rule area
  const inRuleArea = 'BODY' === event.target['tagName'] || 0 < $(event.target).closest('.ddp-wrap-addrule').length;
  if (!inRuleArea) {
    return;
  }
  // Popups handle Enter themselves
  if (this.multipleRenamePopupComponent.isPopupOpen
    || this.createSnapshotPopup.isShow
    || this.extendInputFormulaComponent.isShow) {
    return;
  }
  if (event.keyCode === 13) {
    this.addRule();
  }
} // function - onEnterKeydownHandler
/**
 * Close select boxes and modals when Escape is pressed.
 */
@HostListener('document:keydown.escape', ['$event'])
public onKeydownHandler(event: KeyboardEvent) {
  if (event.keyCode !== 27) {
    return;
  }
  // Clear the selected command when a union/join modal was open
  if (this.isRuleUnionModalShow) {
    this.ruleVO.command = '';
  }
  if (this.isRuleJoinModalShow) {
    this.ruleVO.command = '';
  }
  this.isRuleUnionModalShow = false;
  this.isRuleJoinModalShow = false;
  this.isCommandListShow = false;
  this.isUpdate = false;
}
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Load rule-edit info from the grid component and sync component state
 * (selected dataset, rule list, current index) with the response.
 * On error, resolves with an {error} object instead of rejecting so
 * callers can branch on data['error'].
 * @param {any} params request params ({op, ruleIdx, count, offset?, ruleString?, uiRuleString?})
 * @returns {Promise<any>} resolves with {apiData, gridData} or {error}
 * @private
 */
private _setEditRuleInfo( params : any ): Promise<any> {
this.loadingShow();
this.isInitDataLoaded = true;
this.safelyDetectChanges();
return this._editRuleGridComp.init(this.dsId, params)
.then((data: { apiData: any, gridData: any }) => {
if (isNullOrUndefined(data.apiData)) {
// On error, strip backticks from field names before surfacing the error
this.selectedDataSet.gridData.fields.filter((item) => {
return item.name = item.name.replace(/`/g, '');
});
return {
error : data['error']
}
}
const apiData = data.apiData;
// Keep the local index in sync with the server-reported one
this.serverSyncIndex = data.apiData.ruleCurIdx;
if (apiData.errorMsg) {
this.loadingHide();
Alert.warning(this.translateService.instant('msg.dp.alert.ds.retrieve.fail'));
} else {
// Adopt the API response as the selected dataset, keeping grid data and ids
this.selectedDataSet = apiData;
this.selectedDataSet.gridData = data.gridData;
this.selectedDataSet.dsId = this.dsId;
this.selectedDataSet.dsName = this.dsName;
// Set rule list
this.setRuleList(apiData['transformRules']);
this.isAggregationIncluded = this.hasAggregation();
// init ruleVO
this.initRule(apiData);
this.loadingHide();
}
return data;
})
} // function - _setEditRuleInfo
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/**
 * Build this.ruleList from the server-side transform rules, attaching
 * display metadata (alias, description, simplified rule string).
 */
private setRuleList(rules: any) {
  this.ruleList = [];
  // Known command names, in the same order as this.commandList
  const commandNames = this.commandList.map((command) => command.command);
  rules.forEach((rule) => {
    const vo = JSON.parse(rule['jsonRuleString']);
    vo['command'] = vo['name'];
    vo['ruleNo'] = rule['ruleNo'];
    rule['ruleVO'] = vo;
    const idx = commandNames.indexOf(vo.command);
    if (idx > -1) {
      // Recognised command: copy display metadata from the command list
      rule['command'] = this.commandList[idx].command;
      rule['alias'] = this.commandList[idx].alias;
      rule['desc'] = this.commandList[idx].desc;
      if (rule.shortRuleString) {
        rule['simplifiedRule'] = rule.shortRuleString;
      } else {
        // Fall back to a locally simplified rule string, then the raw one
        const ruleStr = PreparationCommonUtil.simplifyRule(vo, this.selectedDataSet.gridResponse.slaveDsNameMap, rule.ruleString);
        rule['simplifiedRule'] = isUndefined(ruleStr) ? rule.ruleString : ruleStr;
      }
    } else {
      // Unknown command: show it as a generic 'Create' step
      rule['simplifiedRule'] = rule.shortRuleString ? rule.shortRuleString : rule.ruleString;
      rule['command'] = 'Create';
      rule['alias'] = 'Cr';
      rule['desc'] = '';
    }
    this.ruleList.push(rule);
  });
}
/**
 * Initialise the command list shown in the rule builder and reset the
 * current rule selection.  Each entry carries: the command keyword, a
 * two-letter alias, a localised description, a hover flag, and
 * `command_h` — what appears to be the command typed with a Korean
 * keyboard layout, presumably used for search matching (TODO confirm).
 */
private _initialiseValues() {
this.commandList = [
{
command: 'header',
alias: 'He',
desc: this.translateService.instant('msg.dp.li.he.description'),
isHover: false,
command_h: ['ㅗㄷㅁㅇㄷㄱ']
},
{ command: 'keep',
alias: 'Ke',
desc: this.translateService.instant('msg.dp.li.ke.description'),
isHover: false,
command_h: ['ㅏㄸ','ㅏ떼','ㅏㄷ','ㅏㄷ데']
},
{
command: 'replace',
alias: 'Rp',
desc: this.translateService.instant('msg.dp.li.rp.description'),
isHover: false,
command_h: ['ㄱㄷ','ㄱ데ㅣㅁㅊㄷ']
},
{
command: 'rename',
alias: 'Rn',
desc: this.translateService.instant('msg.dp.li.rn.description'),
isHover: false,
command_h: ['ㄱㄷ','ㄱ둠','ㄱ두므','ㄱ두믇']
},
{ command: 'set',
alias: 'Se',
desc: this.translateService.instant('msg.dp.li.se.description'),
isHover: false,
command_h: ['ㄴㄷㅅ']
},
{
command: 'settype',
alias: 'St',
desc: this.translateService.instant('msg.dp.li.st.description'),
isHover: false,
command_h: ['ㄴㄷㅆ','ㄴㄷ쑈ㅔㄷ','ㄴㄷㅅ','ㄴㄷㅅ쇼ㅔㄷ']
},
{
command: 'countpattern',
alias: 'Co',
desc: this.translateService.instant('msg.dp.li.co.description'),
isHover: false,
command_h: ['ㅊ','채ㅕㅜㅅ','채ㅕㅜ세','채ㅕㅜ셈ㅆㄷㄱ','채ㅕㅜ셈ㅆㄷ구','채ㅕㅜ셈ㅅㅅㄷ','채ㅕㅜ셈ㅅㅅㄷ구']
},
{
command: 'split',
alias: 'Sp',
desc: this.translateService.instant('msg.dp.li.sp.description'),
isHover: false,
command_h: ['ㄴ','네ㅣㅑㅅ']
},
{
command: 'derive',
alias: 'Dr',
desc: this.translateService.instant('msg.dp.li.dr.description'),
isHover: false,
command_h: ['ㅇㄷ갸','ㅇㄷ걒ㄷ']
},
{
command: 'delete',
alias: 'De',
desc: this.translateService.instant('msg.dp.li.de.description'),
isHover: false,
command_h: ['ㅇㄷ','ㅇ디','ㅇ딛ㅅㄷ']
},
{ command: 'drop',
alias: 'Dp',
desc: this.translateService.instant('msg.dp.li.dp.description'),
isHover: false,
command_h: ['ㅇㄱ','ㅇ개ㅔ']
},
{
command: 'pivot',
alias: 'Pv',
desc: this.translateService.instant('msg.dp.li.pv.description'),
isHover: false,
command_h: ['ㅔㅑㅍ','ㅔㅑ패','ㅔㅑ팻']
},
{
command: 'unpivot',
alias: 'Up',
desc: this.translateService.instant('msg.dp.li.up.description'),
isHover: false,
command_h: ['ㅕㅜ','ㅕㅞㅑ','ㅕㅞㅑㅍ','ㅕㅞㅑ패','ㅕㅞㅑ팻']
},
{ command: 'join',
alias: 'Jo',
desc: this.translateService.instant('msg.dp.li.jo.description'),
isHover: false,
command_h: ['ㅓㅐㅑㅜ']
},
{
command: 'extract',
alias: 'Ex',
desc: this.translateService.instant('msg.dp.li.ex.description'),
isHover: false,
command_h: ['ㄷㅌㅅㄱㅁㅊㅅ']
},
{
command: 'flatten',
alias: 'Fl',
desc: this.translateService.instant('msg.dp.li.fl.description'),
isHover: false,
command_h: ['ㄹ','리','림ㅆㄷ','림ㅆ두','림ㅅㅅㄷ','림ㅅㅅ두']
},
{
command: 'merge',
alias: 'Me',
desc: this.translateService.instant('msg.dp.li.me.description'),
isHover: false,
command_h: ['ㅡㄷㄱㅎㄷ']
},
{ command: 'nest',
alias: 'Ne',
desc: this.translateService.instant('msg.dp.li.ne.description'),
isHover: false,
command_h: ['ㅜㄷㄴㅅ']
},
{
command: 'unnest',
alias: 'Un',
desc: this.translateService.instant('msg.dp.li.un.description'),
isHover: false,
command_h: ['ㅕㅜㅜㄷㄴㅅ']
},
{
command: 'aggregate',
alias: 'Ag',
desc: this.translateService.instant('msg.dp.li.ag.description'),
isHover: false,
command_h: ['ㅁㅎㅎㄱㄷㅎㅁㅅㄷ']
},
{
command: 'sort',
alias: 'So',
desc: this.translateService.instant('msg.dp.li.so.description'),
isHover: false,
command_h: ['ㄴ','내','낵','낷']
},
{
command: 'move',
alias: 'Mv',
desc: this.translateService.instant('msg.dp.li.mv.description'),
isHover: false,
command_h: ['ㅡㅐㅍㄷ']
},
{
command: 'union',
alias: 'Ui',
desc: this.translateService.instant('msg.dp.li.ui.description'),
isHover: false,
command_h: ['ㅕㅜㅑㅐㅜ']
},
{
command: 'setformat',
alias: 'Sf',
desc: this.translateService.instant('msg.dp.li.sf.description'),
isHover: false,
command_h: ['ㄴㄷㅅ랙','ㄴㄷㅅ래그','ㄴㄷㅅ래금ㅅ']
},
{
command: 'window',
alias: 'Wn',
// NOTE(review): key is 'wd' while the alias is 'Wn' — confirm this is
// the intended message key and not a typo.
desc: this.translateService.instant('msg.dp.li.wd.description'),
isHover: false,
command_h: ['ㅈ','쟈ㅜㅇ','쟈ㅜ애','쟈ㅜ앶']
}
];
// set rule
if (this.selectedDataSet && this.selectedDataSet.rules && this.selectedDataSet.rules.length > 0) {
this.setRuleList(this.selectedDataSet.rules);
this.isAggregationIncluded = this.hasAggregation();
}
// init ruleVO
this.ruleVO.command = '';
}
/**
 * Apply a rule to the dataset and refresh grid/rule-list state.
 * @param rule   rule payload ({op, ruleIdx?, command?, ruleString?, uiRuleString?})
 * @param isUndo true when the call originates from an UNDO action
 */
private applyRule(rule: object, isUndo?: boolean) {
  const command = rule['command'];
  // Save current scroll position
  this._editRuleGridComp.savePosition();
  this.loadingShow();
  this.changeDetect.detectChanges();
  this.isJumped = false;
  this.opString = rule['op'];
  // Use the caller-supplied index when present. `ruleIdx` may legitimately
  // be 0 (the first rule), so test for null/undefined instead of truthiness
  // (the previous `ruleIdx ? … : …` silently dropped index 0).
  const ruleIdx = isNullOrUndefined(rule['ruleIdx']) ? this.serverSyncIndex : rule['ruleIdx'];
  const param = {
    op: this.opString,
    ruleIdx : ruleIdx,
    count: 100,
    ruleString : rule['ruleString'],
    uiRuleString : JSON.stringify(rule['uiRuleString'])
  };
  this._setEditRuleInfo(param).then((data: { apiData: any, gridData: any }) => {
    this._isExecAddRule = false;
    if (data['error']) {
      const prepError = this.dataprepExceptionHandler(data['error']);
      PreparationAlert.output(prepError, this.translateService.instant(prepError.message));
      return;
    }
    // TODO : need to refresh selected column after applying rule
    this._editRuleGridComp.unSelectionAll();
    this.serverSyncIndex = data.apiData.ruleCurIdx;
    if (data.apiData.transformRules.length > 0) {
      // Highlight the columns affected by the most recent rule
      this._editRuleGridComp.setAffectedColumns(
        data.apiData.gridResponse['interestedColNames'],
        data.apiData.transformRules[data.apiData.transformRules.length - 1].command);
    }
    // Rule list colouring uses the server-reported index
    this.setRuleListColorWhenJumped(this.serverSyncIndex);
    this.ruleListComponent.selectedRuleIdx = this.serverSyncIndex;
    // TODO(review): restoring the saved scroll position is currently disabled
    // for commands other than join/derive/aggregate/move:
    // this._editRuleGridComp.moveToSavedPosition();
    // Prevent repeated undo/redo clicks while a request is in flight
    if (isUndo && this.isUndoRunning) {
      this.isUndoRunning = false;
    } else if (!isUndo && this.isRedoRunning) {
      this.isRedoRunning = false;
    }
    this.inputRuleCmd = '';
  });
}
/**
 * Prepare the right-hand dataset state for editing an existing join rule
 * and open the join modal.
 * @param rule rule entry containing the serialized join rule string
 */
private setJoinEditInfo(rule) {
  const parsed = JSON.parse(rule['jsonRuleString']);
  this.rightDataset = new PrDataset();
  // Editing (not appending) an existing join
  this.rightDataset.joinButtonText = 'Edit Join';
  // Rule number is needed when updating
  this.rightDataset.ruleNo = rule['ruleNo'];
  // Right-hand dataset id and join configuration from the rule string
  this.rightDataset.dsId = parsed.dataset2;
  this.rightDataset.selectedJoinType = parsed.joinType;
  this.rightDataset.rightSelectCol = parsed.rightCol;
  this.rightDataset.leftSelectCol = parsed.leftCol;
  // Pair up left/right join keys by position
  this.rightDataset.joinRuleList = parsed.leftJoinKey.map((leftKey, i) => {
    const info = new JoinInfo();
    info.leftJoinKey = leftKey;
    info.rightJoinKey = parsed.rightJoinKey[i];
    return info;
  });
  this.isRuleJoinModalShow = true;
  this.changeDetect.detectChanges();
}
/**
 * Check whether the rule list contains an aggregate rule, looking only at
 * rules up to (and including) the current server-synced index.
 * @returns {boolean} true when an aggregate rule is present
 */
private hasAggregation() {
  // slice() is non-mutating and replaces the previous clone-then-splice dance
  const rules = this.ruleList.slice(0, this.serverSyncIndex + 1);
  // NOTE(review): this reads `item.valid`, while setRuleListColorWhenJumped
  // writes `item.isValid` — confirm `valid` actually exists on rule entries.
  return rules.some((item) => {
    return item.valid && item.command === 'aggregate';
  });
}
/**
 * Create the two Split.js panes (left/right and top/bottom) and keep their
 * handles so they can be destroyed later.
 */
private _setSplit() {
  // Shared drag callbacks: resize the grid when dragging ends, close any
  // open grid context menus when it starts.
  const onDragEnd = () => {
    this._editRuleGridComp.resizeGrid();
  };
  const onDragStart = () => {
    this._editRuleGridComp.gridAllContextClose();
  };
  this._split = [
    Split(['.rule-left', '.rule-right'], {
      sizes: [80, 20],
      minSize: [300, 300],
      onDragEnd: onDragEnd,
      onDragStart: onDragStart
    }),
    Split(['.rule-top', '.rule-bottom'], {
      direction: 'vertical',
      sizes: [75, 25],
      minSize: [400, 110],
      onDragEnd: onDragEnd,
      onDragStart: onDragStart
    })
  ];
}
/**
 * Destroy all Split.js panes and clear the handle list.
 */
private _destroySplit() {
  for (const split of this._split) {
    split.destroy();
  }
  this._split = [];
}
/**
* Get dataflow info (API)
* @private
*/
private _getDataflow() {
return new Promise<any>((resolve, reject) => {
this.dataflowService.getDataflow(this.dfId).then(result => {
resolve(result);
}).catch((err) => reject(err));
});
}
/**
* Get dataset info (API)
* @private
*/
private _getDataset() {
return new Promise<any>((resolve, reject) => {
this.dataflowService.getDataset(this.dsId).then(result => {
resolve(result);
}).catch((err) => reject(err));
});
}
/**
 * Fetch dataflow and dataset info in parallel, then load the initial grid
 * state and restore the previously jumped-to rule index.
 * @private
 */
private _getDataflowAndDataset() {
  Promise.all([this._getDataflow(), this._getDataset()]).then((result) => {
    // Stray debug console.log removed here.
    this.dataflow = result[0];
    this.dsName = result[1].dsName;
    this.dsList = result[0].datasets;
    // set rule
    if (this.selectedDataSet && this.selectedDataSet.rules && this.selectedDataSet.rules.length > 0) {
      this.setRuleList(this.selectedDataSet.rules);
      this.isAggregationIncluded = this.hasAggregation();
    }
    // init ruleVO
    this.ruleVO.command = '';
    this._setEditRuleInfo({op:'INITIAL', ruleIdx: null, count: 100, offset: 0}).then((data)=> {
      if (data['error']) {
        const prepError = this.dataprepExceptionHandler(data['error']);
        PreparationAlert.output(prepError, this.translateService.instant(prepError.message));
        return;
      }
      this.serverSyncIndex = data.apiData.ruleCurIdx;
      // On first load, come back to the index the user had jumped to before
      this.ruleListComponent.selectedRuleIdx = this.serverSyncIndex;
      this.setRuleListColorWhenJumped(this.serverSyncIndex);
    });
  }).catch(() => {
    // Stop the loading indicator on failure
    this.loadingHide();
  });
}
}
class JoinInfo {
public leftJoinKey: string;
public rightJoinKey: string;
} | the_stack |
// Ambient declarations for internal TypeScript compiler core utilities
// (array/map helpers, diagnostics, path manipulation, object allocation).
// NOTE(review): this looks like a generated .d.ts chunk — declarations only,
// no runtime behavior; edits here cannot change program execution.
declare module "typescript" {
const enum Ternary {
False = 0,
Maybe = 1,
True = -1,
}
const enum Comparison {
LessThan = -1,
EqualTo = 0,
GreaterThan = 1,
}
interface StringSet extends Map<any> {
}
// --- array helpers ---
function forEach<T, U>(array: T[], callback: (element: T, index: number) => U): U;
function contains<T>(array: T[], value: T): boolean;
function indexOf<T>(array: T[], value: T): number;
function countWhere<T>(array: T[], predicate: (x: T) => boolean): number;
function filter<T>(array: T[], f: (x: T) => boolean): T[];
function map<T, U>(array: T[], f: (x: T) => U): U[];
function concatenate<T>(array1: T[], array2: T[]): T[];
function deduplicate<T>(array: T[]): T[];
function sum(array: any[], prop: string): number;
/**
 * Returns the last element of an array if non-empty, undefined otherwise.
 */
function lastOrUndefined<T>(array: T[]): T;
function binarySearch(array: number[], value: number): number;
// --- map helpers ---
function hasProperty<T>(map: Map<T>, key: string): boolean;
function getProperty<T>(map: Map<T>, key: string): T;
function isEmpty<T>(map: Map<T>): boolean;
function clone<T>(object: T): T;
function extend<T>(first: Map<T>, second: Map<T>): Map<T>;
function forEachValue<T, U>(map: Map<T>, callback: (value: T) => U): U;
function forEachKey<T, U>(map: Map<T>, callback: (key: string) => U): U;
function lookUp<T>(map: Map<T>, key: string): T;
function mapToArray<T>(map: Map<T>): T[];
function copyMap<T>(source: Map<T>, target: Map<T>): void;
/**
 * Creates a map from the elements of an array.
 *
 * @param array the array of input elements.
 * @param makeKey a function that produces a key for a given element.
 *
 * This function makes no effort to avoid collisions; if any two elements produce
 * the same key with the given 'makeKey' function, then the element with the higher
 * index in the array will be the one associated with the produced key.
 */
function arrayToMap<T>(array: T[], makeKey: (value: T) => string): Map<T>;
// --- diagnostics ---
var localizedDiagnosticMessages: Map<string>;
function getLocaleSpecificMessage(message: string): string;
function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage, ...args: any[]): Diagnostic;
function createCompilerDiagnostic(message: DiagnosticMessage, ...args: any[]): Diagnostic;
function chainDiagnosticMessages(details: DiagnosticMessageChain, message: DiagnosticMessage, ...args: any[]): DiagnosticMessageChain;
function concatenateDiagnosticMessageChains(headChain: DiagnosticMessageChain, tailChain: DiagnosticMessageChain): DiagnosticMessageChain;
function flattenDiagnosticChain(file: SourceFile, start: number, length: number, diagnosticChain: DiagnosticMessageChain, newLine: string): Diagnostic;
function compareValues<T>(a: T, b: T): Comparison;
function compareDiagnostics(d1: Diagnostic, d2: Diagnostic): number;
function deduplicateSortedDiagnostics(diagnostics: Diagnostic[]): Diagnostic[];
// --- path manipulation ---
function normalizeSlashes(path: string): string;
function getRootLength(path: string): number;
var directorySeparator: string;
function normalizePath(path: string): string;
function getDirectoryPath(path: string): string;
function isUrl(path: string): boolean;
function isRootedDiskPath(path: string): boolean;
function getNormalizedPathComponents(path: string, currentDirectory: string): string[];
function getNormalizedAbsolutePath(filename: string, currentDirectory: string): string;
function getNormalizedPathFromPathComponents(pathComponents: string[]): string;
function getRelativePathToDirectoryOrUrl(directoryPathOrUrl: string, relativeOrAbsolutePath: string, currentDirectory: string, getCanonicalFileName: (fileName: string) => string, isAbsolutePathAnUrl: boolean): string;
function getBaseFilename(path: string): string;
function combinePaths(path1: string, path2: string): string;
function fileExtensionIs(path: string, extension: string): boolean;
function removeFileExtension(path: string): string;
/**
 * Based heavily on the abstract 'Quote' operation from ECMA-262 (24.3.2.2),
 * but augmented for a few select characters.
 * Note that this doesn't actually wrap the input in double quotes.
 */
function escapeString(s: string): string;
// --- object allocation & debug assertions ---
interface ObjectAllocator {
getNodeConstructor(kind: SyntaxKind): new () => Node;
getSymbolConstructor(): new (flags: SymbolFlags, name: string) => Symbol;
getTypeConstructor(): new (checker: TypeChecker, flags: TypeFlags) => Type;
getSignatureConstructor(): new (checker: TypeChecker) => Signature;
}
var objectAllocator: ObjectAllocator;
const enum AssertionLevel {
None = 0,
Normal = 1,
Aggressive = 2,
VeryAggressive = 3,
}
module Debug {
function shouldAssert(level: AssertionLevel): boolean;
function assert(expression: boolean, message?: string, verboseDebugInfo?: () => string): void;
function fail(message?: string): void;
}
}
// Ambient declarations for the compiler's host-environment abstraction:
// `System` wraps file-system, console, and process facilities so the
// compiler can run on different hosts. Declarations only — no runtime code.
declare module "typescript" {
interface System {
args: string[];
newLine: string;
useCaseSensitiveFileNames: boolean;
write(s: string): void;
readFile(fileName: string, encoding?: string): string;
writeFile(fileName: string, data: string, writeByteOrderMark?: boolean): void;
watchFile?(fileName: string, callback: (fileName: string) => void): FileWatcher;
resolvePath(path: string): string;
fileExists(path: string): boolean;
directoryExists(path: string): boolean;
createDirectory(directoryName: string): void;
getExecutingFilePath(): string;
getCurrentDirectory(): string;
readDirectory(path: string, extension?: string): string[];
getMemoryUsage?(): number;
exit(exitCode?: number): void;
}
// Handle returned by watchFile; call close() to stop watching.
interface FileWatcher {
close(): void;
}
var sys: System;
}
declare module "typescript" {
interface ReferencePathMatchResult {
fileReference?: FileReference;
diagnosticMessage?: DiagnosticMessage;
isNoDefaultLib?: boolean;
}
function getDeclarationOfKind(symbol: Symbol, kind: SyntaxKind): Declaration;
interface StringSymbolWriter extends SymbolWriter {
string(): string;
}
interface EmitHost extends ScriptReferenceHost {
getSourceFiles(): SourceFile[];
isEmitBlocked(sourceFile?: SourceFile): boolean;
getCommonSourceDirectory(): string;
getCanonicalFileName(fileName: string): string;
getNewLine(): string;
writeFile(filename: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void): void;
}
function getSingleLineStringWriter(): StringSymbolWriter;
function releaseStringWriter(writer: StringSymbolWriter): void;
function getFullWidth(node: Node): number;
function containsParseError(node: Node): boolean;
function getSourceFileOfNode(node: Node): SourceFile;
function nodePosToString(node: Node): string;
function getStartPosOfNode(node: Node): number;
function nodeIsMissing(node: Node): boolean;
function nodeIsPresent(node: Node): boolean;
function getTokenPosOfNode(node: Node, sourceFile?: SourceFile): number;
function getSourceTextOfNodeFromSourceFile(sourceFile: SourceFile, node: Node): string;
function getTextOfNodeFromSourceText(sourceText: string, node: Node): string;
function getTextOfNode(node: Node): string;
function escapeIdentifier(identifier: string): string;
function unescapeIdentifier(identifier: string): string;
function declarationNameToString(name: DeclarationName): string;
function createDiagnosticForNode(node: Node, message: DiagnosticMessage, arg0?: any, arg1?: any, arg2?: any): Diagnostic;
function createDiagnosticForNodeFromMessageChain(node: Node, messageChain: DiagnosticMessageChain, newLine: string): Diagnostic;
function getErrorSpanForNode(node: Node): Node;
function isExternalModule(file: SourceFile): boolean;
function isDeclarationFile(file: SourceFile): boolean;
function isConstEnumDeclaration(node: Node): boolean;
function getCombinedNodeFlags(node: Node): NodeFlags;
function isConst(node: Node): boolean;
function isLet(node: Node): boolean;
function isPrologueDirective(node: Node): boolean;
function getLeadingCommentRangesOfNode(node: Node, sourceFileOfNode?: SourceFile): CommentRange[];
function getJsDocComments(node: Node, sourceFileOfNode: SourceFile): CommentRange[];
var fullTripleSlashReferencePathRegEx: RegExp;
function forEachReturnStatement<T>(body: Block, visitor: (stmt: ReturnStatement) => T): T;
function isAnyFunction(node: Node): boolean;
function isFunctionBlock(node: Node): boolean;
function isObjectLiteralMethod(node: Node): boolean;
function getContainingFunction(node: Node): FunctionLikeDeclaration;
function getThisContainer(node: Node, includeArrowFunctions: boolean): Node;
function getSuperContainer(node: Node, includeFunctions: boolean): Node;
function getInvokedExpression(node: CallLikeExpression): Expression;
function isExpression(node: Node): boolean;
function isInstantiatedModule(node: ModuleDeclaration, preserveConstEnums: boolean): boolean;
function isExternalModuleImportDeclaration(node: Node): boolean;
function getExternalModuleImportDeclarationExpression(node: Node): Expression;
function isInternalModuleImportDeclaration(node: Node): boolean;
function hasDotDotDotToken(node: Node): boolean;
function hasQuestionToken(node: Node): boolean;
function hasRestParameters(s: SignatureDeclaration): boolean;
function isLiteralKind(kind: SyntaxKind): boolean;
function isTextualLiteralKind(kind: SyntaxKind): boolean;
function isTemplateLiteralKind(kind: SyntaxKind): boolean;
function isBindingPattern(node: Node): boolean;
function isInAmbientContext(node: Node): boolean;
function isDeclaration(node: Node): boolean;
function isStatement(n: Node): boolean;
function isDeclarationOrFunctionExpressionOrCatchVariableName(name: Node): boolean;
function getClassBaseTypeNode(node: ClassDeclaration): TypeReferenceNode;
function getClassImplementedTypeNodes(node: ClassDeclaration): NodeArray<TypeReferenceNode>;
function getInterfaceBaseTypeNodes(node: InterfaceDeclaration): NodeArray<TypeReferenceNode>;
function getHeritageClause(clauses: NodeArray<HeritageClause>, kind: SyntaxKind): HeritageClause;
function tryResolveScriptReference(host: ScriptReferenceHost, sourceFile: SourceFile, reference: FileReference): SourceFile;
function getAncestor(node: Node, kind: SyntaxKind): Node;
function getFileReferenceFromReferencePath(comment: string, commentRange: CommentRange): ReferencePathMatchResult;
function isKeyword(token: SyntaxKind): boolean;
function isTrivia(token: SyntaxKind): boolean;
function isModifier(token: SyntaxKind): boolean;
function createEmitHostFromProgram(program: Program): EmitHost;
function textSpanEnd(span: TextSpan): number;
function textSpanIsEmpty(span: TextSpan): boolean;
function textSpanContainsPosition(span: TextSpan, position: number): boolean;
function textSpanContainsTextSpan(span: TextSpan, other: TextSpan): boolean;
function textSpanOverlapsWith(span: TextSpan, other: TextSpan): boolean;
function textSpanOverlap(span1: TextSpan, span2: TextSpan): TextSpan;
function textSpanIntersectsWithTextSpan(span: TextSpan, other: TextSpan): boolean;
function textSpanIntersectsWith(span: TextSpan, start: number, length: number): boolean;
function textSpanIntersectsWithPosition(span: TextSpan, position: number): boolean;
function textSpanIntersection(span1: TextSpan, span2: TextSpan): TextSpan;
function createTextSpan(start: number, length: number): TextSpan;
function createTextSpanFromBounds(start: number, end: number): TextSpan;
function textChangeRangeNewSpan(range: TextChangeRange): TextSpan;
function textChangeRangeIsUnchanged(range: TextChangeRange): boolean;
function createTextChangeRange(span: TextSpan, newLength: number): TextChangeRange;
var unchangedTextChangeRange: TextChangeRange;
/**
* Called to merge all the changes that occurred across several versions of a script snapshot
* into a single change. i.e. if a user keeps making successive edits to a script we will
* have a text change from V1 to V2, V2 to V3, ..., Vn.
*
* This function will then merge those changes into a single change range valid between V1 and
* Vn.
*/
function collapseTextChangeRangesAcrossMultipleVersions(changes: TextChangeRange[]): TextChangeRange;
}
declare module "typescript" {
// Command-line / tsconfig handling surface of the compiler API.
var optionDeclarations: CommandLineOption[];
// Parses argv-style arguments into compiler options, file names and errors.
function parseCommandLine(commandLine: string[]): ParsedCommandLine;
// Reads a config file from disk and returns its parsed JSON value.
function readConfigFile(filename: string): any;
// Turns already-parsed config JSON into options + root file names.
// NOTE(review): `basePath` is presumably used to resolve relative paths in the
// config — confirm against the implementation.
function parseConfigFile(json: any, basePath?: string): ParsedCommandLine;
}
declare module "typescript" {
// Position/range arithmetic and token-navigation utilities (language-service
// internals; all positions are 0-based character offsets into the source text).
interface ListItemInfo {
listItemIndex: number;
list: Node;
}
function getEndLinePosition(line: number, sourceFile: SourceFile): number;
function getStartPositionOfLine(line: number, sourceFile: SourceFile): number;
function getStartLinePositionForPosition(position: number, sourceFile: SourceFile): number;
// Range predicates: "contains" is inclusive of both members' extents;
// "overlaps" requires a non-empty shared span.
function rangeContainsRange(r1: TextRange, r2: TextRange): boolean;
function startEndContainsRange(start: number, end: number, range: TextRange): boolean;
function rangeContainsStartEnd(range: TextRange, start: number, end: number): boolean;
function rangeOverlapsWithStartEnd(r1: TextRange, start: number, end: number): boolean;
function startEndOverlapsWithStartEnd(start1: number, end1: number, start2: number, end2: number): boolean;
function findListItemInfo(node: Node): ListItemInfo;
function findChildOfKind(n: Node, kind: SyntaxKind, sourceFile?: SourceFile): Node;
function findContainingList(node: Node): Node;
function getTouchingWord(sourceFile: SourceFile, position: number): Node;
function getTouchingPropertyName(sourceFile: SourceFile, position: number): Node;
/** Returns the token if position is in [start, end) or if position === end and includeItemAtEndPosition(token) === true */
function getTouchingToken(sourceFile: SourceFile, position: number, includeItemAtEndPosition?: (n: Node) => boolean): Node;
/** Returns a token if position is in [start-of-leading-trivia, end) */
function getTokenAtPosition(sourceFile: SourceFile, position: number): Node;
/**
* The token on the left of the position is the token that strictly includes the position
* or sits to the left of the cursor if it is on a boundary. For example
*
* fo|o -> will return foo
* foo <comment> |bar -> will return foo
*
*/
function findTokenOnLeftOfPosition(file: SourceFile, position: number): Node;
function findNextToken(previousToken: Node, parent: Node): Node;
function findPrecedingToken(position: number, sourceFile: SourceFile, startNode?: Node): Node;
function getNodeModifiers(node: Node): string;
function getTypeArgumentOrTypeParameterList(node: Node): NodeArray<Node>;
function isToken(n: Node): boolean;
function isComment(kind: SyntaxKind): boolean;
function isPunctuation(kind: SyntaxKind): boolean;
function isInsideTemplateLiteral(node: LiteralExpression, position: number): boolean;
function compareDataObjects(dst: any, src: any): boolean;
}
declare module "typescript" {
// Builders for SymbolDisplayPart arrays — the structured text fragments used
// to render quick info, signature help, etc.
function isFirstDeclarationOfSymbolParameter(symbol: Symbol): boolean;
function symbolPart(text: string, symbol: Symbol): SymbolDisplayPart;
function displayPart(text: string, kind: SymbolDisplayPartKind, symbol?: Symbol): SymbolDisplayPart;
function spacePart(): SymbolDisplayPart;
function keywordPart(kind: SyntaxKind): SymbolDisplayPart;
function punctuationPart(kind: SyntaxKind): SymbolDisplayPart;
function operatorPart(kind: SyntaxKind): SymbolDisplayPart;
function textPart(text: string): SymbolDisplayPart;
function lineBreakPart(): SymbolDisplayPart;
// Runs `writeDisplayParts` against a capturing writer and returns what it wrote.
function mapToDisplayParts(writeDisplayParts: (writer: DisplayPartsSymbolWriter) => void): SymbolDisplayPart[];
function typeToDisplayParts(typechecker: TypeChecker, type: Type, enclosingDeclaration?: Node, flags?: TypeFormatFlags): SymbolDisplayPart[];
function symbolToDisplayParts(typeChecker: TypeChecker, symbol: Symbol, enclosingDeclaration?: Node, meaning?: SymbolFlags, flags?: SymbolFormatFlags): SymbolDisplayPart[];
function signatureToDisplayParts(typechecker: TypeChecker, signature: Signature, enclosingDeclaration?: Node, flags?: TypeFormatFlags): SymbolDisplayPart[];
}
import * as t from '../../src'
import { either } from 'fp-ts/lib/Either'
//
// helpers
//
// Flattens a mapped/intersection type into a plain object type so that the
// `Equals` check below compares structural results, not type identities.
type Compact<A> = { [K in keyof A]: A[K] }
/**
 * Returns the string literal 'T' if `A` and `B` are equal types, 'F' otherwise
 */
type Equals<A, B> = (<C>() => C extends Compact<A> ? 'T' : 'F') extends <C>() => C extends Compact<B> ? 'T' : 'F'
? 'T'
: 'F'
// Codec decoding a `number` from its string representation. Used throughout
// these tests to exercise codecs whose output type (string) differs from
// their runtime type (number).
export const NumberFromString = new t.Type<number, string, unknown>(
'NumberFromString',
t.number.is,
(u, c) =>
either.chain(t.string.validate(u, c), (s) => {
const n = parseFloat(s)
return isNaN(n) ? t.failure(s, c) : t.success(n)
}),
String
)
// dtslint assertions: the `$ExpectType` / `$ExpectError` comments below are
// checked by the type-test runner. `$ExpectType` must stay on the same line as
// its expression and `$ExpectError` applies to the line immediately after it —
// do not insert lines between them.
//
// recursion
//
interface Recursion1 {
type: 'a'
items: Array<Recursion1>
}
const Recursion1: t.Type<Recursion1> = t.recursion('T', () =>
t.type({
type: t.literal('a'),
items: t.array(Recursion1)
})
)
const Recursion1TypeTest = Recursion1 // $ExpectType Type<Recursion1, Recursion1, unknown>
//
// literal
//
const Literal1 = t.literal('a') // $ExpectType LiteralC<"a">
type Literal1TypeTest = t.TypeOf<typeof Literal1> // $ExpectType "a"
type Literal1OutputTest = t.OutputOf<typeof Literal1> // $ExpectType "a"
//
// keyof
//
const Keyof1 = t.keyof({ a: true, b: true }) // $ExpectType KeyofC<{ a: boolean; b: boolean; }>
type Keyof1TypeTest = t.TypeOf<typeof Keyof1> // $ExpectType "a" | "b"
type Keyof1OutputTest = t.OutputOf<typeof Keyof1> // $ExpectType "a" | "b"
//
// refinement
//
const Refinement1 = t.refinement(t.number, (n) => n % 2 === 0) // $ExpectType RefinementC<NumberC>
type Refinement1TypeTest = t.TypeOf<typeof Refinement1> // $ExpectType number
type Refinement1OutputTest = t.OutputOf<typeof Refinement1> // $ExpectType number
const Refinement2 = t.refinement(NumberFromString, (n) => n % 2 === 0) // $ExpectType RefinementC<Type<number, string, unknown>>
type Refinement2TypeTest = t.TypeOf<typeof Refinement2> // $ExpectType number
type Refinement2OutputTest = t.OutputOf<typeof Refinement2> // $ExpectType string
//
// array
//
const Array1 = t.array(t.number) // $ExpectType ArrayC<NumberC>
type Array1TypeTest = t.TypeOf<typeof Array1> // $ExpectType number[]
type Array1OutputTest = t.OutputOf<typeof Array1> // $ExpectType number[]
const Array2 = t.array(NumberFromString) // $ExpectType ArrayC<Type<number, string, unknown>>
type Array2TypeTest = t.TypeOf<typeof Array2> // $ExpectType number[]
type Array2OutputTest = t.OutputOf<typeof Array2> // $ExpectType string[]
//
// type
//
const Type1 = t.type({ a: t.string, b: t.number }) // $ExpectType TypeC<{ a: StringC; b: NumberC; }>
type Type1TypeTest = Equals<t.TypeOf<typeof Type1>, { a: string; b: number }> // $ExpectType "T"
type Type1OutputTest = Equals<t.OutputOf<typeof Type1>, { a: string; b: number }> // $ExpectType "T"
const Type2 = t.type({ a: t.type({ b: t.string }) }) // $ExpectType TypeC<{ a: TypeC<{ b: StringC; }>; }>
type Type2TypeTest = Equals<t.TypeOf<typeof Type2>, { a: { b: string } }> // $ExpectType "T"
type Type2OutputTest = Equals<t.OutputOf<typeof Type2>, { a: { b: string } }> // $ExpectType "T"
const Type3 = t.type({ a: NumberFromString }) // $ExpectType TypeC<{ a: Type<number, string, unknown>; }>
type Type3TypeTest = Equals<t.TypeOf<typeof Type3>, { a: number }> // $ExpectType "T"
type Type3OutputTest = Equals<t.OutputOf<typeof Type3>, { a: string }> // $ExpectType "T"
//
// record
//
const Record1 = t.record(t.keyof({ a: true }), t.number) // $ExpectType RecordC<KeyofC<{ a: boolean; }>, NumberC>
type Record1TypeTest = Equals<t.TypeOf<typeof Record1>, { [K in 'a']: number }> // $ExpectType "T"
type Record1OutputTest = Equals<t.OutputOf<typeof Record1>, { [K in 'a']: number }> // $ExpectType "T"
const Record2 = t.record(t.string, NumberFromString) // $ExpectType RecordC<StringC, Type<number, string, unknown>>
type Record2TypeTest = Equals<t.TypeOf<typeof Record2>, { [K in string]: number }> // $ExpectType "T"
type Record2OutputTest = Equals<t.OutputOf<typeof Record2>, { [K in string]: string }> // $ExpectType "T"
//
// union
//
// $ExpectError
const Union0 = t.union([])
// $ExpectError
const Union1 = t.union([t.boolean])
const Union2 = t.union([t.boolean, t.number]) // $ExpectType UnionC<[BooleanC, NumberC]>
type Union2TypeTest = t.TypeOf<typeof Union2> // $ExpectType number | boolean
type Union2OutputTest = t.OutputOf<typeof Union2> // $ExpectType number | boolean
const Union3 = t.union([t.boolean, NumberFromString]) // $ExpectType UnionC<[BooleanC, Type<number, string, unknown>]>
type Union3TypeTest = t.TypeOf<typeof Union3> // $ExpectType number | boolean
type Union3OutputTest = t.OutputOf<typeof Union3> // $ExpectType string | boolean
// Intersection arity tests: 2..5 members are typed precisely; 6+ members must
// be a compile error. `Intersection23OutputTest` / `Intersection43OutputTest`
// below look like typos for `Intersection3OutputTest` / `Intersection4OutputTest`
// (renaming is a code change, so only noted here).
//
// intersection
//
const Intersection2 = t.intersection([t.type({ a: t.number }), t.type({ b: t.string })]) // $ExpectType IntersectionC<[TypeC<{ a: NumberC; }>, TypeC<{ b: StringC; }>]>
type Intersection2TypeTest = Equals<t.TypeOf<typeof Intersection2>, { a: number; b: string }> // $ExpectType "T"
type Intersection2OutputTest = Equals<t.OutputOf<typeof Intersection2>, { a: number; b: string }> // $ExpectType "T"
const Intersection3 = t.intersection([t.type({ a: t.number }), t.type({ b: t.string }), t.type({ c: t.boolean })])
const Intersection3Test = Intersection3 // $ExpectType IntersectionC<[TypeC<{ a: NumberC; }>, TypeC<{ b: StringC; }>, TypeC<{ c: BooleanC; }>]>
type Intersection3TypeTest = Equals<t.TypeOf<typeof Intersection3>, { a: number; b: string; c: boolean }> // $ExpectType "T"
type Intersection23OutputTest = Equals<t.OutputOf<typeof Intersection3>, { a: number; b: string; c: boolean }> // $ExpectType "T"
const Intersection4 = t.intersection([
t.type({ a: t.number }),
t.type({ b: t.string }),
t.type({ c: t.boolean }),
t.type({ d: t.null })
])
const Intersection4Test = Intersection4 // $ExpectType IntersectionC<[TypeC<{ a: NumberC; }>, TypeC<{ b: StringC; }>, TypeC<{ c: BooleanC; }>, TypeC<{ d: NullC; }>]>
type Intersection4TypeTest = Equals<t.TypeOf<typeof Intersection4>, { a: number; b: string; c: boolean; d: null }> // $ExpectType "T"
type Intersection43OutputTest = Equals<t.OutputOf<typeof Intersection4>, { a: number; b: string; c: boolean; d: null }> // $ExpectType "T"
const Intersection5 = t.intersection([
t.type({ a: t.number }),
t.type({ b: t.string }),
t.type({ c: t.boolean }),
t.type({ d: t.null }),
t.type({ e: t.undefined })
])
const Intersection5Test = Intersection5 // $ExpectType IntersectionC<[TypeC<{ a: NumberC; }>, TypeC<{ b: StringC; }>, TypeC<{ c: BooleanC; }>, TypeC<{ d: NullC; }>, TypeC<{ e: UndefinedC; }>]>
interface ExpectedIntersection5TypeTest {
a: number
b: string
c: boolean
d: null
e: undefined
}
type Intersection5TypeTest = Equals<t.TypeOf<typeof Intersection5>, ExpectedIntersection5TypeTest> // $ExpectType "T"
interface ExpectedIntersection53OutputTest {
a: number
b: string
c: boolean
d: null
e: undefined
}
type Intersection53OutputTest = Equals<t.OutputOf<typeof Intersection5>, ExpectedIntersection53OutputTest> // $ExpectType "T"
const Intersection6 = t.intersection([t.type({ a: NumberFromString }), t.type({ b: t.string })]) // $ExpectType IntersectionC<[TypeC<{ a: Type<number, string, unknown>; }>, TypeC<{ b: StringC; }>]>
type Intersection6TypeTest = Equals<t.TypeOf<typeof Intersection6>, { a: number; b: string }> // $ExpectType "T"
type Intersection6OutputTest = Equals<t.OutputOf<typeof Intersection6>, { a: string; b: string }> // $ExpectType "T"
// $ExpectError
const Intersection7 = t.intersection([t.string, t.string, t.string, t.string, t.string, t.string])
// NOTE(review): `testIntersectionOuput` is missing a 't' (Output); noted only,
// since renaming would be a code change.
declare function testIntersectionInput<T>(x: t.Type<Record<keyof T, string>, any, unknown>): void
declare function testIntersectionOuput<T>(x: t.Type<any, Record<keyof T, string>, unknown>): void
const QueryString = t.intersection([
t.type({
a: t.string
}),
t.type({
b: t.number
})
])
// $ExpectError
testIntersectionInput(QueryString)
// $ExpectError
testIntersectionOuput(QueryString)
const IntersectionWithPrimitive = t.intersection([
t.number,
t.type({
a: t.literal('a')
})
])
type IntersectionWithPrimitiveTest = Equals<t.TypeOf<typeof IntersectionWithPrimitive>, number & { a: 'a' }> // $ExpectType "T"
//
// tuple
//
// $ExpectError
const Tuple1 = t.tuple([])
const Tuple2 = t.tuple([t.string]) // $ExpectType TupleC<[StringC]>
type Tuple2TypeTest = t.TypeOf<typeof Tuple2> // $ExpectType [string]
type Tuple2OutputTest = t.OutputOf<typeof Tuple2> // $ExpectType [string]
const Tuple3 = t.tuple([t.string, t.number]) // $ExpectType TupleC<[StringC, NumberC]>
type Tuple3TypeTest = t.TypeOf<typeof Tuple3> // $ExpectType [string, number]
type Tuple3OutputTest = t.OutputOf<typeof Tuple3> // $ExpectType [string, number]
const Tuple4 = t.tuple([t.string, NumberFromString]) // $ExpectType TupleC<[StringC, Type<number, string, unknown>]>
type Tuple4TypeTest = t.TypeOf<typeof Tuple4> // $ExpectType [string, number]
type Tuple4OutputTest = t.OutputOf<typeof Tuple4> // $ExpectType [string, string]
const Tuple5 = t.tuple([t.string, t.number, t.boolean]) // $ExpectType TupleC<[StringC, NumberC, BooleanC]>
type Tuple5TypeTest = t.TypeOf<typeof Tuple5> // $ExpectType [string, number, boolean]
type Tuple5OutputTest = t.OutputOf<typeof Tuple5> // $ExpectType [string, number, boolean]
const Tuple6 = t.tuple([t.string, t.number, t.boolean, t.null]) // $ExpectType TupleC<[StringC, NumberC, BooleanC, NullC]>
type Tuple6TypeTest = t.TypeOf<typeof Tuple6> // $ExpectType [string, number, boolean, null]
type Tuple6OutputTest = t.OutputOf<typeof Tuple6> // $ExpectType [string, number, boolean, null]
const Tuple7 = t.tuple([t.string, t.number, t.boolean, t.null, t.undefined]) // $ExpectType TupleC<[StringC, NumberC, BooleanC, NullC, UndefinedC]>
type Tuple7TypeTest = t.TypeOf<typeof Tuple7> // $ExpectType [string, number, boolean, null, undefined]
type Tuple7OutputTest = t.OutputOf<typeof Tuple7> // $ExpectType [string, number, boolean, null, undefined]
// $ExpectError
const Tuple8 = t.tuple([t.string, t.string, t.string, t.string, t.string, t.string])
// Combinators that wrap other codecs: partial/readonly/strict/exact preserve
// the wrapped codec's TypeOf/OutputOf distinction (see NumberFromString cases).
//
// partial
//
const Partial1 = t.partial({ a: t.string, b: t.number }) // $ExpectType PartialC<{ a: StringC; b: NumberC; }>
type Partial1TypeTest = Equals<t.TypeOf<typeof Partial1>, { a?: string; b?: number }> // $ExpectType "T"
type Partial1OutputTest = Equals<t.OutputOf<typeof Partial1>, { a?: string; b?: number }> // $ExpectType "T"
const Partial2 = t.partial({ a: t.string, b: NumberFromString }) // $ExpectType PartialC<{ a: StringC; b: Type<number, string, unknown>; }>
type Partial2TypeTest = Equals<t.TypeOf<typeof Partial2>, { a?: string; b?: number }> // $ExpectType "T"
type Partial2OutputTest = Equals<t.OutputOf<typeof Partial2>, { a?: string; b?: string }> // $ExpectType "T"
//
// readonly
//
const Readonly1 = t.readonly(t.type({ a: t.number })) // $ExpectType ReadonlyC<TypeC<{ a: NumberC; }>>
type Readonly1TypeTest = Equals<t.TypeOf<typeof Readonly1>, { readonly a: number }> // $ExpectType "T"
type Readonly1OutputTest = Equals<t.OutputOf<typeof Readonly1>, { readonly a: number }> // $ExpectType "T"
const Readonly2 = t.readonly(t.type({ a: NumberFromString })) // $ExpectType ReadonlyC<TypeC<{ a: Type<number, string, unknown>; }>>
type Readonly2TypeTest = Equals<t.TypeOf<typeof Readonly2>, { readonly a: number }> // $ExpectType "T"
type Readonly2OutputTest = Equals<t.OutputOf<typeof Readonly2>, { readonly a: string }> // $ExpectType "T"
const Readonly3 = t.readonly(t.tuple([t.string, NumberFromString]))
type Readonly3TypeTest = Equals<t.TypeOf<typeof Readonly3>, Readonly<[string, number]>> // $ExpectType "T"
type Readonly3OutputTest = Equals<t.OutputOf<typeof Readonly3>, Readonly<[string, string]>> // $ExpectType "T"
//
// readonlyArray
//
const ReadonlyArray1 = t.readonlyArray(t.number)
type ReadonlyArray1TypeTest = t.TypeOf<typeof ReadonlyArray1> // $ExpectType ReadonlyArray<number>
type ReadonlyArray1OutputTest = t.OutputOf<typeof ReadonlyArray1> // $ExpectType ReadonlyArray<number>
const ReadonlyArray2 = t.readonlyArray(NumberFromString)
type ReadonlyArray2TypeTest = t.TypeOf<typeof ReadonlyArray2> // $ExpectType ReadonlyArray<number>
type ReadonlyArray2OutputTest = t.OutputOf<typeof ReadonlyArray2> // $ExpectType ReadonlyArray<string>
//
// strict
//
const Strict1 = t.strict({ a: t.string, b: t.number }) // $ExpectType ExactC<TypeC<{ a: StringC; b: NumberC; }>>
type Strict1TypeTest = Equals<t.TypeOf<typeof Strict1>, { a: string; b: number }> // $ExpectType "T"
type Strict1OutputTest = Equals<t.OutputOf<typeof Strict1>, { a: string; b: number }> // $ExpectType "T"
const Strict2 = t.strict({ a: t.strict({ b: t.string }) }) // $ExpectType ExactC<TypeC<{ a: ExactC<TypeC<{ b: StringC; }>>; }>>
type Strict2TypeTest = Equals<t.TypeOf<typeof Strict2>, { a: { b: string } }> // $ExpectType "T"
type Strict2OutputTest = Equals<t.OutputOf<typeof Strict2>, { a: { b: string } }> // $ExpectType "T"
const Strict3 = t.strict({ a: NumberFromString }) // $ExpectType ExactC<TypeC<{ a: Type<number, string, unknown>; }>>
type Strict3TypeTest = Equals<t.TypeOf<typeof Strict3>, { a: number }> // $ExpectType "T"
type Strict3OutputTest = Equals<t.OutputOf<typeof Strict3>, { a: string }> // $ExpectType "T"
//
// tagged unions
//
const TaggedUnion1 = t.taggedUnion('type', [
t.type({ type: t.literal('a'), a: t.number }),
t.type({ type: t.literal('b') })
])
const TaggedUnion1Type = TaggedUnion1 // $ExpectType TaggedUnionC<"type", [TypeC<{ type: LiteralC<"a">; a: NumberC; }>, TypeC<{ type: LiteralC<"b">; }>]>
type TaggedUnion1TypeTest = Equals<t.TypeOf<typeof TaggedUnion1>, { type: 'a'; a: number } | { type: 'b' }> // $ExpectType "T"
type TaggedUnion1OutputTest = Equals<t.OutputOf<typeof TaggedUnion1>, { type: 'a'; a: number } | { type: 'b' }> // $ExpectType "T"
// Mutually recursive tagged union: the interface and the codec intentionally
// share the same name so `t.TypeOf` resolves to the interface.
interface TaggedUnion2_A {
type: 'a'
b: TaggedUnion2_B | undefined
}
interface TaggedUnion2_B {
type: 'b'
a: TaggedUnion2_A | undefined
}
const TaggedUnion2_A: t.RecursiveType<any, TaggedUnion2_A> = t.recursion<TaggedUnion2_A>('TaggedUnion2_A', (_) =>
t.type({
type: t.literal('a'),
b: t.union([TaggedUnion2_B, t.undefined])
})
)
const TaggedUnion2_B: t.RecursiveType<any, TaggedUnion2_B> = t.recursion<TaggedUnion2_B>('TaggedUnion2_B', (_) =>
t.type({
type: t.literal('b'),
a: t.union([TaggedUnion2_A, t.undefined])
})
)
const TaggedUnion2 = t.taggedUnion('type', [TaggedUnion2_A, TaggedUnion2_B])
type TaggedUnion2TypeTest = Equals<t.TypeOf<typeof TaggedUnion2>, TaggedUnion2_A | TaggedUnion2_B> // $ExpectType "T"
type TaggedUnion2OutputTest = Equals<t.OutputOf<typeof TaggedUnion2>, TaggedUnion2_A | TaggedUnion2_B> // $ExpectType "T"
//
// exact
//
const Exact1 = t.exact(t.type({ a: t.number })) // $ExpectType ExactC<TypeC<{ a: NumberC; }>>
type Exact1TypeTest = Equals<t.TypeOf<typeof Exact1>, { a: number }> // $ExpectType "T"
type Exact1OutputTest = Equals<t.OutputOf<typeof Exact1>, { a: number }> // $ExpectType "T"
const Exact2 = t.exact(t.type({ a: NumberFromString })) // $ExpectType ExactC<TypeC<{ a: Type<number, string, unknown>; }>>
type Exact2TypeTest = Equals<t.TypeOf<typeof Exact2>, { a: number }> // $ExpectType "T"
type Exact2OutputTest = Equals<t.OutputOf<typeof Exact2>, { a: string }> // $ExpectType "T"
// clean/alias: relabel a codec's static types with named interfaces (C1/C1O)
// without changing runtime behavior; invalid relabelings must not compile.
//
// clean / alias
//
const C1 = t.type({
a: t.string,
b: NumberFromString
})
interface C1 {
a: string
b: number
}
interface C1O {
a: string
b: string
}
interface C1WithAdditionalProp extends C1 {
c: boolean
}
// $ExpectError
const C2 = t.clean<C1>(C1)
// $ExpectError
const C3 = t.clean<C1WithAdditionalProp, C1O>(C1)
const C4 = t.clean<C1, C1O>(C1)
type CleanTest1 = t.TypeOf<typeof C4> // $ExpectType C1
type CleanTest2 = t.OutputOf<typeof C4> // $ExpectType C1O
const C5 = t.alias(C1)<C1>()
type AliasTest1 = t.TypeOf<typeof C5> // $ExpectType C1
type AliasTest2 = t.OutputOf<typeof C5>['a'] // $ExpectType string
type AliasTest3 = t.OutputOf<typeof C5>['b'] // $ExpectType string
// $ExpectError
const C6 = t.alias(C1)<C1, C1>()
// $ExpectError
const C7 = t.alias(C1)<C1WithAdditionalProp, C1O>()
const C8 = t.alias(C1)<C1, C1O>()
type AliasTest4 = t.TypeOf<typeof C8> // $ExpectType C1
type AliasTest5 = t.OutputOf<typeof C8> // $ExpectType C1O
//
// miscellanea
//
// `Generable` is a closed union of codec classes used below to show that user
// code can exhaustively switch over `_tag`.
interface GenerableProps {
[key: string]: Generable
}
type GenerableInterface = t.InterfaceType<GenerableProps>
type GenerableStrict = t.StrictType<GenerableProps>
type GenerablePartials = t.PartialType<GenerableProps>
interface GenerableRecord extends t.DictionaryType<Generable, Generable> {}
interface GenerableRefinement extends t.RefinementType<Generable> {}
interface GenerableArray extends t.ArrayType<Generable> {}
interface GenerableUnion extends t.UnionType<Array<Generable>> {}
interface GenerableIntersection extends t.IntersectionType<Array<Generable>> {}
interface GenerableTuple extends t.TupleType<Array<Generable>> {}
interface GenerableReadonly extends t.ReadonlyType<Generable> {}
interface GenerableReadonlyArray extends t.ReadonlyArrayType<Generable> {}
interface GenerableRecursive extends t.RecursiveType<Generable> {}
type Generable =
| t.StringC
| t.NumberC
| t.BooleanType
| GenerableInterface
| GenerableRefinement
| GenerableArray
| GenerableStrict
| GenerablePartials
| GenerableRecord
| GenerableUnion
| GenerableIntersection
| GenerableTuple
| GenerableReadonly
| GenerableReadonlyArray
| t.LiteralType<any>
| t.KeyofType<any>
| GenerableRecursive
| t.UndefinedType
/**
 * Renders a `Generable` codec as a label string, recursing into composite
 * codecs. Its real purpose is the compile-time check: the `switch` has no
 * `default`, so this function only type-checks while every `_tag` in the
 * `Generable` union is handled (exhaustiveness test).
 */
function f(generable: Generable): string {
  switch (generable._tag) {
    case 'InterfaceType':
      // Describe an interface codec by joining the descriptions of its props.
      return Object.keys(generable.props)
        .map((k) => f(generable.props[k]))
        .join('/')
    case 'StringType':
      return 'StringC'
    case 'NumberType':
      // Fixed: previously returned 'StringC', an apparent copy-paste of the
      // case above. 'NumberC' matches the codec named in the Generable union.
      return 'NumberC'
    case 'BooleanType':
      return 'BooleanType'
    case 'RefinementType':
      // Refinements are described by their underlying codec.
      return f(generable.type)
    case 'ArrayType':
      return 'ArrayType'
    case 'StrictType':
      return 'StrictType'
    case 'PartialType':
      return 'PartialType'
    case 'DictionaryType':
      return 'DictionaryType'
    case 'UnionType':
      return 'UnionType'
    case 'IntersectionType':
      return 'IntersectionType'
    case 'TupleType':
      return generable.types.map(f).join('/')
    case 'ReadonlyType':
      return 'ReadonlyType'
    case 'ReadonlyArrayType':
      return 'ReadonlyArrayType'
    case 'LiteralType':
      return 'LiteralType'
    case 'KeyofType':
      return 'KeyofType'
    case 'RecursiveType':
      return f(generable.type)
    case 'UndefinedType':
      return 'UndefinedType'
  }
}
// Smoke tests: these codecs must be assignable to `Generable` so `f` accepts
// them; `// OK!` marks calls expected to type-check.
const schema = t.type({
a: t.string,
b: t.union([
t.partial({
c: t.string,
d: t.literal('eee')
}),
t.boolean
]),
e: t.intersection([
t.type({
f: t.array(t.string)
}),
t.type({
g: t.union([t.literal('toto'), t.literal('tata')])
})
])
})
f(schema) // OK!
interface Rec {
a: number
b: Rec | undefined
}
const Rec = t.recursion<Rec, Rec, t.mixed, GenerableInterface>('T', (self) =>
t.type({
a: t.number,
b: t.union([self, t.undefined])
})
)
f(Rec) // OK!
// ----------------
/**
 * Builds a codec for an object with both required and optional keys: the
 * intersection of `t.type(required)` (all keys mandatory) and
 * `t.partial(optional)` (all keys optional).
 */
export function interfaceWithOptionals<RequiredProps extends t.Props, OptionalProps extends t.Props>(
  required: RequiredProps,
  optional: OptionalProps,
  name?: string
): t.IntersectionC<[t.TypeC<RequiredProps>, t.PartialC<OptionalProps>]> {
  const requiredCodec = t.type(required)
  const optionalCodec = t.partial(optional)
  return t.intersection([requiredCodec, optionalCodec], name)
}
// ----------------
/**
 * Widens `type` into a nullable codec: values of the wrapped codec, or `null`.
 */
export function maybe<RT extends t.Any>(
  type: RT,
  name?: string
): t.UnionType<[RT, t.NullType], t.TypeOf<RT> | null, t.OutputOf<RT> | null, t.InputOf<RT> | null> {
  // The explicitly-typed tuple pins the union's member list to [RT, NullC],
  // exactly as the original explicit type argument did.
  const members: [RT, t.NullType] = [type, t.null]
  return t.union(members, name)
}
// ----------------
import { TaskEither } from 'fp-ts/lib/TaskEither'
// Checks that a codec for `void` interoperates with TaskEither-based APIs.
// tslint:disable-next-line:strict-export-declare-modifiers
declare function withValidation<L, A>(
type: t.Type<A>,
f: (errors: t.Errors) => L,
fa: TaskEither<L, A>
): TaskEither<L, A>
// tslint:disable-next-line:void-return
declare const fa: TaskEither<string, void>
withValidation(t.void, () => 'validation error', fa)
// Branded types: `t.brand` requires the brand name argument to match the key
// of the brand interface, and branded values must not be interchangeable.
//
// brand
//
interface PositiveBrand {
readonly Positive: unique symbol
}
const PositiveBad = t.brand(
t.number,
// $ExpectError
(n): n is t.Branded<number, PositiveBrand> => n > 0,
'Bad' // name doesn't match
)
const Positive = t.brand(t.number, (n): n is t.Branded<number, PositiveBrand> => n > 0, 'Positive') // $ExpectType BrandC<NumberC, PositiveBrand>
const PositiveInt = t.intersection([t.Int, Positive])
const Person = t.type({
name: t.string,
age: PositiveInt
})
type Person = t.TypeOf<typeof Person> // $ExpectType { name: string; age: number & Brand<IntBrand> & Brand<PositiveBrand>; }
// $ExpectError
const person: Person = { name: 'name', age: -1.2 }
interface IntBrand2 {
readonly Int: unique symbol
}
const Int2 = t.brand(t.number, (n): n is t.Branded<number, IntBrand2> => Number.isInteger(n), 'Int')
type Int2 = t.TypeOf<typeof Int2> // $ExpectType Branded<number, IntBrand2>
// should be possible to convert a branded type to its carrier type
const toNumber = (n: t.Int): number => n
// $ExpectError
const intToInt2 = (int: t.Int): Int2 => int
//
// UnknownRecord
//
// `Record<string, unknown>` must reject arrays and boxed/exotic objects.
// $ExpectError
const ur1: Record<string, unknown> = [1, 2, 3]
// $ExpectError
const ur2: Record<string, unknown> = new Date()
// tslint:disable: no-construct
// $ExpectError
const ur3: Record<string, unknown> = new Number()
// $ExpectError
const ur4: Record<string, unknown> = new String()
// $ExpectError
const ur5: Record<string, unknown> = new Boolean()
// tslint:enable: no-construct
import { Merged, mergeParams } from '../internal/params_utils.js';
// ================================================================
// "Public" ParamsBuilder API / Documentation
// ================================================================
/**
 * Provides doc comments for the methods of CaseParamsBuilder and SubcaseParamsBuilder.
 * (Also enforces rough interface match between them.)
 */
export interface ParamsBuilder {
/**
 * Expands each item in `this` into zero or more items.
 * Each item has its parameters expanded with those returned by the `expander`.
 *
 * **Note:** When only a single key is being added, use the simpler `expand` for readability.
 *
 * ```text
 * this               = [ a , b , c ]
 * this.map(expander) = [ f(a) , f(b) , f(c) ]
 *                    = [ [a1, a2, a3] , [ b1 ] , [] ]
 * merge and flatten  = [ merge(a, a1), merge(a, a2), merge(a, a3), merge(b, b1) ]
 * ```
 */
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
expandWithParams(expander: (_: any) => any): any;
/**
 * Expands each item in `this` into zero or more items. Each item has its parameters expanded
 * with one new key, `key`, and the values returned by `expander`.
 */
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
expand(key: string, expander: (_: any) => any): any;
/**
 * Expands each item in `this` to multiple items, one for each item in `newParams`.
 *
 * In other words, takes the cartesian product of [ the items in `this` ] and `newParams`.
 *
 * **Note:** When only a single key is being added, use the simpler `combine` for readability.
 *
 * ```text
 * this      = [ {a:1}, {b:2} ]
 * newParams = [ {x:1}, {y:2} ]
 * this.combineWithParams(newParams) = [ {a:1,x:1}, {a:1,y:2}, {b:2,x:1}, {b:2,y:2} ]
 * ```
 */
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
combineWithParams(newParams: Iterable<any>): any;
/**
 * Expands each item in `this` to multiple items with `{ [key]: value }` for each value.
 *
 * In other words, takes the cartesian product of [ the items in `this` ]
 * and `[ { [key]: value } for each value in newParams ]`.
 */
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
combine(key: string, newParams: Iterable<any>): any;
/**
 * Filters `this` to only items for which `pred` returns true.
 */
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
filter(pred: (_: any) => boolean): any;
/**
 * Filters `this` to only items for which `pred` returns false.
 */
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
unless(pred: (_: any) => boolean): any;
}
/**
 * Determines the resulting parameter object type which would be generated by an object of
 * the given ParamsBuilder type.
 *
 * For a SubcaseParamsBuilder the result merges case and subcase params.
 * NOTE(review): the exact override order is defined by `Merged` in
 * params_utils — confirm there before relying on it.
 */
export type ParamTypeOf<
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
T extends ParamsBuilder
> = T extends SubcaseParamsBuilder<infer CaseP, infer SubcaseP>
? Merged<CaseP, SubcaseP>
: T extends CaseParamsBuilder<infer CaseP>
? CaseP
: never;
// ================================================================
// Implementation
// ================================================================
/**
 * Iterable over pairs of either:
 * - `[case params, Iterable<subcase params>]` if there are subcases.
 * - `[case params, undefined]` if not.
 *
 * NOTE(review): `undefined` (rather than an empty iterable) presumably lets
 * consumers distinguish "case has no subcase stage" from "zero subcases" —
 * confirm against the consumers of this type.
 */
export type CaseSubcaseIterable<CaseP, SubcaseP> = Iterable<
readonly [CaseP, Iterable<SubcaseP> | undefined]
>;
/**
 * Base class for `CaseParamsBuilder` and `SubcaseParamsBuilder`.
 */
export abstract class ParamsBuilderBase<CaseP extends {}, SubcaseP extends {}> {
// Thunk producing a fresh case generator; being a factory (rather than a
// generator instance) allows the cases to be iterated more than once.
protected readonly cases: () => Generator<CaseP>;
constructor(cases: () => Generator<CaseP>) {
this.cases = cases;
}
/**
 * Hidden from test files. Use `builderIterateCasesWithSubcases` to access this.
 */
protected abstract iterateCasesWithSubcases(): CaseSubcaseIterable<CaseP, SubcaseP>;
}
/**
 * Calls the (normally hidden) `iterateCasesWithSubcases()` method.
 */
export function builderIterateCasesWithSubcases(builder: ParamsBuilderBase<{}, {}>) {
  // The method is `protected`; widen through `unknown` to a structural type
  // that exposes it, without altering its declared visibility.
  const exposed = (builder as unknown) as {
    iterateCasesWithSubcases(): CaseSubcaseIterable<{}, {}>;
  };
  return exposed.iterateCasesWithSubcases();
}
/**
 * Builder for combinatorial test **case** parameters.
 *
 * CaseParamsBuilder is immutable: every method returns a brand-new builder
 * with the transformed case list, leaving the receiver untouched. This means,
 * for example, that the `unit` passed into `TestBuilder.params()` can be reused.
 */
export class CaseParamsBuilder<CaseP extends {}>
  extends ParamsBuilderBase<CaseP, {}>
  implements Iterable<CaseP>, ParamsBuilder {
  *iterateCasesWithSubcases(): CaseSubcaseIterable<CaseP, {}> {
    // No subcase stage exists yet, so every case pairs with `undefined`.
    for (const caseP of this.cases()) {
      yield [caseP, undefined];
    }
  }

  [Symbol.iterator](): Iterator<CaseP> {
    return this.cases();
  }

  /** @inheritdoc */
  expandWithParams<NewP extends {}>(
    expander: (_: Merged<{}, CaseP>) => Iterable<NewP>
  ): CaseParamsBuilder<Merged<CaseP, NewP>> {
    const expanded = expanderGenerator(this.cases, expander);
    // Seed the expansion with the empty base object.
    return new CaseParamsBuilder(() => expanded({}));
  }

  /** @inheritdoc */
  expand<NewPKey extends string, NewPValue>(
    key: NewPKey,
    expander: (_: Merged<{}, CaseP>) => Iterable<NewPValue>
  ): CaseParamsBuilder<Merged<CaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expandWithParams(function* (p) {
      for (const value of expander(p)) {
        // NewPKey is always a single literal string type, which TypeScript
        // cannot see here; assert the mapped-object shape.
        yield { [key]: value } as { [name in NewPKey]: NewPValue };
      }
    });
  }

  /** @inheritdoc */
  combineWithParams<NewP extends {}>(
    newParams: Iterable<NewP>
  ): CaseParamsBuilder<Merged<CaseP, NewP>> {
    return this.expandWithParams(() => newParams);
  }

  /** @inheritdoc */
  combine<NewPKey extends string, NewPValue>(
    key: NewPKey,
    values: Iterable<NewPValue>
  ): CaseParamsBuilder<Merged<CaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expand(key, () => values);
  }

  /** @inheritdoc */
  filter(pred: (_: Merged<{}, CaseP>) => boolean): CaseParamsBuilder<CaseP> {
    const kept = filterGenerator(this.cases, pred);
    return new CaseParamsBuilder(() => kept({}));
  }

  /** @inheritdoc */
  unless(pred: (_: Merged<{}, CaseP>) => boolean): CaseParamsBuilder<CaseP> {
    return this.filter(p => !pred(p));
  }

  /**
   * "Finalize" the list of cases and begin defining subcases.
   * Returns a new SubcaseParamsBuilder. Methods called on SubcaseParamsBuilder
   * generate new subcases instead of new cases.
   */
  beginSubcases(): SubcaseParamsBuilder<CaseP, {}> {
    // Each case starts with exactly one empty subcase.
    return new SubcaseParamsBuilder(
      () => this.cases(),
      function* () {
        yield {};
      }
    );
  }
}
/**
 * The unit CaseParamsBuilder, representing a single case with no params: `[ {} ]`.
 *
 * `punit` is passed to every `.params()`/`.paramsSubcasesOnly()` call, so `kUnitCaseParamsBuilder`
 * is only explicitly needed if constructing a ParamsBuilder outside of a test builder.
 */
export const kUnitCaseParamsBuilder = new CaseParamsBuilder(function* () {
  // Exactly one case, with no parameters.
  yield {};
});
/**
 * Builder for combinatorial test _subcase_ parameters.
 *
 * SubcaseParamsBuilder is immutable: every method returns a brand-new builder
 * with the transformed subcase list, leaving the receiver untouched.
 */
export class SubcaseParamsBuilder<CaseP extends {}, SubcaseP extends {}>
  extends ParamsBuilderBase<CaseP, SubcaseP>
  implements ParamsBuilder {
  // Per-case factory for that case's subcase parameter objects.
  protected readonly subcases: (_: CaseP) => Generator<SubcaseP>;

  constructor(cases: () => Generator<CaseP>, generator: (_: CaseP) => Generator<SubcaseP>) {
    super(cases);
    this.subcases = generator;
  }

  *iterateCasesWithSubcases(): CaseSubcaseIterable<CaseP, SubcaseP> {
    for (const caseP of this.cases()) {
      // Materialize the subcases up front; a case whose subcase list came out
      // empty (e.g. everything filtered away) is dropped entirely.
      const subcases = Array.from(this.subcases(caseP));
      if (subcases.length === 0) {
        continue;
      }
      yield [caseP, subcases];
    }
  }

  /** @inheritdoc */
  expandWithParams<NewP extends {}>(
    expander: (_: Merged<CaseP, SubcaseP>) => Iterable<NewP>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, NewP>> {
    return new SubcaseParamsBuilder(this.cases, expanderGenerator(this.subcases, expander));
  }

  /** @inheritdoc */
  expand<NewPKey extends string, NewPValue>(
    key: NewPKey,
    expander: (_: Merged<CaseP, SubcaseP>) => Iterable<NewPValue>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expandWithParams(function* (p) {
      for (const value of expander(p)) {
        // NewPKey is always a single literal string type, which TypeScript
        // cannot see here; assert the mapped-object shape.
        yield { [key]: value } as { [name in NewPKey]: NewPValue };
      }
    });
  }

  /** @inheritdoc */
  combineWithParams<NewP extends {}>(
    newParams: Iterable<NewP>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, NewP>> {
    return this.expandWithParams(() => newParams);
  }

  /** @inheritdoc */
  combine<NewPKey extends string, NewPValue>(
    key: NewPKey,
    values: Iterable<NewPValue>
  ): SubcaseParamsBuilder<CaseP, Merged<SubcaseP, { [name in NewPKey]: NewPValue }>> {
    return this.expand(key, () => values);
  }

  /** @inheritdoc */
  filter(pred: (_: Merged<CaseP, SubcaseP>) => boolean): SubcaseParamsBuilder<CaseP, SubcaseP> {
    return new SubcaseParamsBuilder(this.cases, filterGenerator(this.subcases, pred));
  }

  /** @inheritdoc */
  unless(pred: (_: Merged<CaseP, SubcaseP>) => boolean): SubcaseParamsBuilder<CaseP, SubcaseP> {
    return this.filter(p => !pred(p));
  }
}
/**
 * Wraps a generator factory so that every produced value is fed through
 * `expander` and merged with each expansion result.
 */
function expanderGenerator<Base, A, B>(
  baseGenerator: (_: Base) => Generator<A>,
  expander: (_: Merged<Base, A>) => Iterable<B>
): (_: Base) => Generator<Merged<A, B>> {
  return function* (base: Base) {
    for (const produced of baseGenerator(base)) {
      // The expander sees the combined base+produced params...
      const combined = mergeParams(base, produced);
      for (const extra of expander(combined)) {
        // ...but the yielded value merges only produced+extra.
        yield mergeParams(produced, extra);
      }
    }
  };
}
function filterGenerator<Base, A>(
baseGenerator: (_: Base) => Generator<A>,
pred: (_: Merged<Base, A>) => boolean
): (_: Base) => Generator<A> {
return function* (base: Base) {
for (const a of baseGenerator(base)) {
if (pred(mergeParams(base, a))) {
yield a;
}
}
};
} | the_stack |
import {ClusterProfile, ElasticAgentProfile, ElasticAgentProfiles} from "models/elastic_profiles/types";
import {Configuration, Configurations} from "models/shared/configuration";
import {EncryptedValue, PlainTextValue} from "models/shared/config_value";
describe("Types", () => {
describe("Elastic Agent Profiles", () => {
  describe("Validation", () => {
    it("should validate elastic agent profile", () => {
      // All three mandatory fields are blank, so each contributes one error.
      const elasticProfile = new ElasticAgentProfile("", "", "", true, new Configurations([]));
      expect(elasticProfile.isValid()).toBe(false);
      expect(elasticProfile.errors().count()).toBe(3);
      expect(elasticProfile.errors().keys().sort()).toEqual(["clusterProfileId", "id", "pluginId"]);
    });

    it("should validate elastic agent profile id format", () => {
      // Ids may not contain spaces.
      const elasticProfile = new ElasticAgentProfile("invalid id", "pluginId", "foo", true, new Configurations([]));
      expect(elasticProfile.isValid()).toBe(false);
      expect(elasticProfile.errors().count()).toBe(1);
      expect(elasticProfile.errors().keys()).toEqual(["id"]);
      expect(elasticProfile.errors().errors("id"))
        .toEqual(["Invalid Id. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period)."]);
    });

    it("should validate existence of cluster profile id", () => {
      const elasticProfile = new ElasticAgentProfile("id", "pluginId", undefined, true, new Configurations([]));
      expect(elasticProfile.isValid()).toBe(false);
      expect(elasticProfile.errors().count()).toBe(1);
      expect(elasticProfile.errors().keys()).toEqual(["clusterProfileId"]);
      expect(elasticProfile.errors().errors("clusterProfileId"))
        .toEqual(["Cluster profile id must be present"]);
    });
  });

  describe("Serialization and Deserialization", () => {
    it("should serialize elastic agent profile", () => {
      const elasticProfile = new ElasticAgentProfile(
        "docker1",
        "cd.go.docker",
        "prod-cluster",
        true,
        new Configurations([
          new Configuration("image", new PlainTextValue("gocd/server")),
          new Configuration("secret", new EncryptedValue("alskdad"))
        ]));

      // Encrypted properties serialize under `encrypted_value`; plain ones under `value`.
      expect(JSON.parse(JSON.stringify(elasticProfile.toJSON()))).toEqual({
        id: "docker1",
        plugin_id: "cd.go.docker",
        cluster_profile_id: "prod-cluster",
        properties: [{
          key: "image",
          value: "gocd/server"
        },
          {
            key: "secret",
            encrypted_value: "alskdad"
          }
        ]
      });
    });

    it("should deserialize elastic agent profile", () => {
      const elasticProfile = ElasticAgentProfile.fromJSON({
        id: "docker1",
        plugin_id: "cd.go.docker",
        cluster_profile_id: "prod-cluster",
        can_administer: true,
        properties: [{
          key: "image",
          value: "gocd/server",
          encrypted_value: null
        },
          {
            key: "memory",
            value: "10M",
            encrypted_value: null
          }
        ]
      });

      expect(elasticProfile.id()).toEqual("docker1");
      expect(elasticProfile.pluginId()).toEqual("cd.go.docker");
      expect(elasticProfile.clusterProfileId()).toEqual("prod-cluster");
      expect(elasticProfile.properties()!.count()).toBe(2);
      expect(elasticProfile.properties()!.valueFor("image")).toEqual("gocd/server");
      expect(elasticProfile.properties()!.valueFor("memory")).toEqual("10M");
    });

    it("should serialize encrypted value as value when updated", () => {
      const elasticProfile = new ElasticAgentProfile(
        "docker1",
        "cd.go.docker",
        "prod-cluster",
        true,
        new Configurations([
          new Configuration("image", new PlainTextValue("gocd/server")),
          new Configuration("secret", new EncryptedValue("alskdad"))
        ]));

      // Overwriting an encrypted property sends it back as a plain `value`.
      elasticProfile.properties()!.setConfiguration("secret", "foo");

      expect(JSON.parse(JSON.stringify(elasticProfile.toJSON()))).toEqual({
        id: "docker1",
        plugin_id: "cd.go.docker",
        cluster_profile_id: "prod-cluster",
        properties: [{
          key: "image",
          value: "gocd/server"
        },
          {
            key: "secret",
            value: "foo"
          }
        ]
      });
    });

    it("should filter the elastic agent profiles by cluster profile", () => {
      const elasticAgentProfiles = new ElasticAgentProfiles([new ElasticAgentProfile("profile_1",
        "plugin_id",
        "cluster1")]);

      expect(elasticAgentProfiles.filterByClusterProfile("cluster1").length).toEqual(1);
      expect(elasticAgentProfiles.filterByClusterProfile("cluster2").length).toEqual(0);
    });
  });
});
describe("Cluster Profiles", () => {
  describe("Validation", () => {
    it("should validate cluster profile", () => {
      // Both mandatory fields are blank, so each contributes one error.
      const clusterProfile = new ClusterProfile("", "", true, new Configurations([]));
      expect(clusterProfile.isValid()).toBe(false);
      expect(clusterProfile.errors().count()).toBe(2);
      expect(clusterProfile.errors().keys().sort()).toEqual(["id", "pluginId"]);
    });

    it("should validate cluster profile id format", () => {
      // Ids may not contain spaces.
      const clusterProfile = new ClusterProfile("invalid id", "pluginId", true, new Configurations([]));
      expect(clusterProfile.isValid()).toBe(false);
      expect(clusterProfile.errors().count()).toBe(1);
      expect(clusterProfile.errors().keys()).toEqual(["id"]);
      expect(clusterProfile.errors().errors("id"))
        .toEqual(["Invalid Id. This must be alphanumeric and can contain underscores and periods (however, it cannot start with a period)."]);
    });
  });

  describe("Serialization and Deserialization", () => {
    it("should serialize cluster profile", () => {
      const clusterProfile = new ClusterProfile(
        "docker1",
        "cd.go.docker",
        true,
        new Configurations([
          new Configuration("image", new PlainTextValue("gocd/server")),
          new Configuration("secret", new EncryptedValue("alskdad"))
        ]));

      // Encrypted properties serialize under `encrypted_value`; plain ones under `value`.
      expect(JSON.parse(JSON.stringify(clusterProfile.toJSON()))).toEqual({
        id: "docker1",
        plugin_id: "cd.go.docker",
        properties: [{
          key: "image",
          value: "gocd/server"
        },
          {
            key: "secret",
            encrypted_value: "alskdad"
          }
        ]
      });
    });

    it("should deserialize cluster profile", () => {
      const clusterProfile = ClusterProfile.fromJSON({
        id: "docker1",
        plugin_id: "cd.go.docker",
        can_administer: true,
        properties: [{
          key: "image",
          value: "gocd/server",
          encrypted_value: null
        },
          {
            key: "memory",
            value: "10M",
            encrypted_value: null
          }
        ]
      });

      expect(clusterProfile.id()).toEqual("docker1");
      expect(clusterProfile.pluginId()).toEqual("cd.go.docker");
      expect(clusterProfile.properties()!.count()).toBe(2);
      expect(clusterProfile.properties()!.valueFor("image")).toEqual("gocd/server");
      expect(clusterProfile.properties()!.valueFor("memory")).toEqual("10M");
    });

    it("should serialize encrypted value as value when updated", () => {
      const clusterProfile = new ClusterProfile(
        "docker1",
        "cd.go.docker",
        true,
        new Configurations([
          new Configuration("image", new PlainTextValue("gocd/server")),
          new Configuration("secret", new EncryptedValue("alskdad"))
        ]));

      // Overwriting an encrypted property sends it back as a plain `value`.
      clusterProfile.properties()!.setConfiguration("secret", "foo");

      expect(JSON.parse(JSON.stringify(clusterProfile.toJSON()))).toEqual({
        id: "docker1",
        plugin_id: "cd.go.docker",
        properties: [{
          key: "image",
          value: "gocd/server"
        },
          {
            key: "secret",
            value: "foo"
          }
        ]
      });
    });
  });
});
}); | the_stack |
| Copyright (c) 2014-2017, PhosphorJS Contributors
|
| Distributed under the terms of the BSD 3-Clause License.
|
| The full license is in the file LICENSE, distributed with this software.
|----------------------------------------------------------------------------*/
import {
ArrayExt, each, filter, find, max
} from '@lumino/algorithm';
import {
IDisposable
} from '@lumino/disposable';
import {
ISignal, Signal
} from '@lumino/signaling';
import {
Widget
} from './widget';
/**
 * A class which tracks focus among a set of widgets.
 *
 * This class is useful when code needs to keep track of the most
 * recently focused widget(s) among a set of related widgets.
 */
export
class FocusTracker<T extends Widget> implements IDisposable {
  /**
   * Construct a new focus tracker.
   */
  constructor() { }

  /**
   * Dispose of the resources held by the tracker.
   */
  dispose(): void {
    // Do nothing if the tracker is already disposed.
    if (this._counter < 0) {
      return;
    }

    // Mark the tracker as disposed.
    this._counter = -1;

    // Clear the connections for the tracker.
    Signal.clearData(this);

    // Remove all event listeners.
    each(this._widgets, w => {
      w.node.removeEventListener('focus', this, true);
      w.node.removeEventListener('blur', this, true);
    });

    // Clear the internal data structures.
    this._activeWidget = null;
    this._currentWidget = null;
    this._nodes.clear();
    this._numbers.clear();
    this._widgets.length = 0;
  }

  /**
   * A signal emitted when the current widget has changed.
   */
  get currentChanged(): ISignal<this, FocusTracker.IChangedArgs<T>> {
    return this._currentChanged;
  }

  /**
   * A signal emitted when the active widget has changed.
   */
  get activeChanged(): ISignal<this, FocusTracker.IChangedArgs<T>> {
    return this._activeChanged;
  }

  /**
   * A flag indicating whether the tracker is disposed.
   */
  get isDisposed(): boolean {
    // A negative counter is the disposed sentinel (set in dispose()).
    return this._counter < 0;
  }

  /**
   * The current widget in the tracker.
   *
   * #### Notes
   * The current widget is the widget among the tracked widgets which
   * has the *descendant node* which has most recently been focused.
   *
   * The current widget will not be updated if the node loses focus. It
   * will only be updated when a different tracked widget gains focus.
   *
   * If the current widget is removed from the tracker, the previous
   * current widget will be restored.
   *
   * This behavior is intended to follow a user's conceptual model of
   * a semantically "current" widget, where the "last thing of type X"
   * to be interacted with is the "current instance of X", regardless
   * of whether that instance still has focus.
   */
  get currentWidget(): T | null {
    return this._currentWidget;
  }

  /**
   * The active widget in the tracker.
   *
   * #### Notes
   * The active widget is the widget among the tracked widgets which
   * has the *descendant node* which is currently focused.
   */
  get activeWidget(): T | null {
    return this._activeWidget;
  }

  /**
   * A read only array of the widgets being tracked.
   */
  get widgets(): ReadonlyArray<T> {
    return this._widgets;
  }

  /**
   * Get the focus number for a particular widget in the tracker.
   *
   * @param widget - The widget of interest.
   *
   * @returns The focus number for the given widget, or `-1` if the
   *   widget has not had focus since being added to the tracker, or
   *   is not contained by the tracker.
   *
   * #### Notes
   * The focus number indicates the relative order in which the widgets
   * have gained focus. A widget with a larger number has gained focus
   * more recently than a widget with a smaller number.
   *
   * The `currentWidget` will always have the largest focus number.
   *
   * All widgets start with a focus number of `-1`, which indicates that
   * the widget has not been focused since being added to the tracker.
   */
  focusNumber(widget: T): number {
    let n = this._numbers.get(widget);
    return n === undefined ? -1 : n;
  }

  /**
   * Test whether the focus tracker contains a given widget.
   *
   * @param widget - The widget of interest.
   *
   * @returns `true` if the widget is tracked, `false` otherwise.
   */
  has(widget: T): boolean {
    return this._numbers.has(widget);
  }

  /**
   * Add a widget to the focus tracker.
   *
   * @param widget - The widget of interest.
   *
   * #### Notes
   * A widget will be automatically removed from the tracker if it
   * is disposed after being added.
   *
   * If the widget is already tracked, this is a no-op.
   */
  add(widget: T): void {
    // Do nothing if the widget is already tracked.
    if (this._numbers.has(widget)) {
      return;
    }

    // Test whether the widget has focus.
    let focused = widget.node.contains(document.activeElement);

    // Set up the initial focus number.
    let n = focused ? this._counter++ : -1;

    // Add the widget to the internal data structures.
    this._widgets.push(widget);
    this._numbers.set(widget, n);
    this._nodes.set(widget.node, widget);

    // Set up the event listeners. The capturing phase must be used
    // since the 'focus' and 'blur' events don't bubble and Firefox
    // doesn't support the 'focusin' or 'focusout' events.
    widget.node.addEventListener('focus', this, true);
    widget.node.addEventListener('blur', this, true);

    // Connect the disposed signal handler.
    widget.disposed.connect(this._onWidgetDisposed, this);

    // Set the current and active widgets if needed.
    if (focused) {
      this._setWidgets(widget, widget);
    }
  }

  /**
   * Remove a widget from the focus tracker.
   *
   * #### Notes
   * If the widget is the `currentWidget`, the previous current widget
   * will become the new `currentWidget`.
   *
   * A widget will be automatically removed from the tracker if it
   * is disposed after being added.
   *
   * If the widget is not tracked, this is a no-op.
   */
  remove(widget: T): void {
    // Bail early if the widget is not tracked.
    if (!this._numbers.has(widget)) {
      return;
    }

    // Disconnect the disposed signal handler.
    widget.disposed.disconnect(this._onWidgetDisposed, this);

    // Remove the event listeners.
    widget.node.removeEventListener('focus', this, true);
    widget.node.removeEventListener('blur', this, true);

    // Remove the widget from the internal data structures.
    ArrayExt.removeFirstOf(this._widgets, widget);
    this._nodes.delete(widget.node);
    this._numbers.delete(widget);

    // Bail early if the widget is not the current widget.
    if (this._currentWidget !== widget) {
      return;
    }

    // Filter the widgets for those which have had focus.
    let valid = filter(this._widgets, w => this._numbers.get(w) !== -1);

    // Get the valid widget with the max focus number, i.e. the most
    // recently focused remaining widget (or null if none qualify).
    let previous = max(valid, (first, second) => {
      let a = this._numbers.get(first)!;
      let b = this._numbers.get(second)!;
      return a - b;
    }) || null;

    // Set the current and active widgets.
    this._setWidgets(previous, null);
  }

  /**
   * Handle the DOM events for the focus tracker.
   *
   * @param event - The DOM event sent to the panel.
   *
   * #### Notes
   * This method implements the DOM `EventListener` interface and is
   * called in response to events on the tracked nodes. It should
   * not be called directly by user code.
   */
  handleEvent(event: Event): void {
    switch (event.type) {
    case 'focus':
      this._evtFocus(event as FocusEvent);
      break;
    case 'blur':
      this._evtBlur(event as FocusEvent);
      break;
    }
  }

  /**
   * Set the current and active widgets for the tracker.
   */
  private _setWidgets(current: T | null, active: T | null): void {
    // Swap the current widget.
    let oldCurrent = this._currentWidget;
    this._currentWidget = current;

    // Swap the active widget.
    let oldActive = this._activeWidget;
    this._activeWidget = active;

    // Emit the `currentChanged` signal if needed.
    if (oldCurrent !== current) {
      this._currentChanged.emit({ oldValue: oldCurrent, newValue: current });
    }

    // Emit the `activeChanged` signal if needed.
    if (oldActive !== active) {
      this._activeChanged.emit({ oldValue: oldActive, newValue: active });
    }
  }

  /**
   * Handle the `'focus'` event for a tracked widget.
   */
  private _evtFocus(event: FocusEvent): void {
    // Find the widget which gained focus, which is known to exist.
    let widget = this._nodes.get(event.currentTarget as HTMLElement)!;

    // Update the focus number if necessary.
    if (widget !== this._currentWidget) {
      this._numbers.set(widget, this._counter++);
    }

    // Set the current and active widgets.
    this._setWidgets(widget, widget);
  }

  /**
   * Handle the `'blur'` event for a tracked widget.
   */
  private _evtBlur(event: FocusEvent): void {
    // Find the widget which lost focus, which is known to exist.
    let widget = this._nodes.get(event.currentTarget as HTMLElement)!;

    // Get the node which being focused after this blur.
    let focusTarget = event.relatedTarget as HTMLElement;

    // If no other node is being focused, clear the active widget.
    if (!focusTarget) {
      this._setWidgets(this._currentWidget, null);
      return;
    }

    // Bail if the focus widget is not changing.
    if (widget.node.contains(focusTarget)) {
      return;
    }

    // If no tracked widget is being focused, clear the active widget.
    // (If a tracked widget IS being focused, its own 'focus' event
    // will update the current/active widgets instead.)
    if (!find(this._widgets, w => w.node.contains(focusTarget))) {
      this._setWidgets(this._currentWidget, null);
      return;
    }
  }

  /**
   * Handle the `disposed` signal for a tracked widget.
   */
  private _onWidgetDisposed(sender: T): void {
    this.remove(sender);
  }

  private _counter = 0;                  // next focus number; -1 once disposed
  private _widgets: T[] = [];            // all tracked widgets
  private _activeWidget: T | null = null;
  private _currentWidget: T | null = null;
  private _numbers = new Map<T, number>();       // widget -> focus number
  private _nodes = new Map<HTMLElement, T>();    // widget node -> widget
  private _activeChanged = new Signal<this, FocusTracker.IChangedArgs<T>>(this);
  private _currentChanged = new Signal<this, FocusTracker.IChangedArgs<T>>(this);
}
/**
 * The namespace for the `FocusTracker` class statics.
 */
export
namespace FocusTracker {
  /**
   * An arguments object for the changed signals.
   */
  export
  interface IChangedArgs<T extends Widget> {
    /**
     * The old value for the widget.
     */
    oldValue: T | null;

    /**
     * The new value for the widget.
     */
    newValue: T | null;
  }
}
import { logger } from '../../services/logger';
import { SolanaConfig } from './solana.config';
import { countDecimals, TokenValue, walletPath } from '../../services/base';
import NodeCache from 'node-cache';
import bs58 from 'bs58';
import { BigNumber } from 'ethers';
import {
AccountInfo,
Commitment,
Connection,
Keypair,
LogsCallback,
LogsFilter,
ParsedAccountData,
PublicKey,
SlotUpdateCallback,
TokenAmount,
TransactionResponse,
} from '@solana/web3.js';
import {
AccountInfo as TokenAccount,
Token as TokenProgram,
} from '@solana/spl-token';
import { TokenInfo, TokenListProvider } from '@solana/spl-token-registry';
import { TransactionResponseStatusCode } from './solana.requests';
import fse from 'fs-extra';
import { ConfigManagerCertPassphrase } from '../../services/config-manager-cert-passphrase';
const crypto = require('crypto').webcrypto;
// Convenient alias for the Solana chain class.
export type Solanaish = Solana;

export class Solana {
  public rpcUrl;
  public transactionLamports;
  // Cache for fetched transactions (default TTL configured in constructor).
  public cache: NodeCache;

  // Token registry for the configured cluster, loaded by init()/loadTokens().
  protected tokenList: TokenInfo[] = [];
  // Lookup tables derived from tokenList: by symbol and by mint address.
  private _tokenMap: Record<string, TokenInfo> = {};
  private _tokenAddressMap: Record<string, TokenInfo> = {};

  private static _instance: Solana;
  private _requestCount: number;
  private readonly _connection: Connection;
  private readonly _lamportPrice: number;
  private readonly _lamportDecimals: number;
  private readonly _nativeTokenSymbol: string;
  private readonly _tokenProgramAddress: PublicKey;
  private readonly _cluster: string;
  private readonly _metricsLogInterval: number;

  // there are async values set in the constructor
  private _ready: boolean = false;
  private _initializing: boolean = false;
  private _initPromise: Promise<void> = Promise.resolve();
  constructor() {
    this._cluster = SolanaConfig.config.network.slug;

    // Resolve the RPC endpoint: an explicit override wins; otherwise pick the
    // public endpoint for the configured cluster.
    if (SolanaConfig.config.customRpcUrl == undefined) {
      switch (this._cluster) {
        case 'mainnet-beta':
          this.rpcUrl = 'https://api.mainnet-beta.solana.com';
          break;
        case 'devnet':
          this.rpcUrl = 'https://api.devnet.solana.com';
          break;
        case 'testnet':
          this.rpcUrl = 'https://api.testnet.solana.com';
          break;
        default:
          throw new Error('SOLANA_CHAIN not valid');
      }
    } else {
      this.rpcUrl = SolanaConfig.config.customRpcUrl;
    }

    this._connection = new Connection(this.rpcUrl, 'processed' as Commitment);
    this.cache = new NodeCache({ stdTTL: 3600 }); // set default cache ttl to 1hr

    this._nativeTokenSymbol = 'SOL';
    this._tokenProgramAddress = new PublicKey(SolanaConfig.config.tokenProgram);

    this.transactionLamports = SolanaConfig.config.transactionLamports;
    this._lamportPrice = SolanaConfig.config.lamportsToSol;
    this._lamportDecimals = countDecimals(this._lamportPrice);

    this._requestCount = 0;
    this._metricsLogInterval = 300000; // 5 minutes

    // Count RPC requests and periodically log the request rate.
    // NOTE(review): `this.metricsLogInterval` / `this.requestCount` accessors
    // are presumably declared later in this class — confirm they exist.
    this.onDebugMessage('all', this.requestCounter.bind(this));
    setInterval(this.metricLogger.bind(this), this.metricsLogInterval);
  }
  // Gas price reported as the configured lamports-to-SOL rate (not fetched).
  public get gasPrice(): number {
    return this._lamportPrice;
  }

  // Lazily-constructed singleton accessor.
  public static getInstance(): Solana {
    if (!Solana._instance) {
      Solana._instance = new Solana();
    }
    return Solana._instance;
  }

  public static getConnectedInstances(): { [name: string]: Solana } {
    return { solana: Solana._instance };
  }

  // Discards the singleton and builds a fresh instance (e.g. after config change).
  public static reload(): Solana {
    Solana._instance = new Solana();
    return Solana._instance;
  }

  // True once init() has finished loading the token list.
  ready(): boolean {
    return this._ready;
  }

  public get connection() {
    return this._connection;
  }

  // Subscribe to slot updates from the RPC node.
  public onNewSlot(func: SlotUpdateCallback) {
    this._connection.onSlotUpdate(func);
  }

  // Subscribe to on-chain log messages matching `filter`.
  public onDebugMessage(filter: LogsFilter, func: LogsCallback) {
    this._connection.onLogs(filter, func);
  }
  // Idempotent async initialization: the first caller kicks off the token-list
  // load; concurrent and later callers all await the same promise.
  async init(): Promise<void> {
    if (!this.ready() && !this._initializing) {
      this._initializing = true;
      this._initPromise = this.loadTokens().then(() => {
        this._ready = true;
        this._initializing = false;
      });
    }
    return this._initPromise;
  }
async loadTokens(): Promise<void> {
this.tokenList = await this.getTokenList();
this.tokenList.forEach((token: TokenInfo) => {
this._tokenMap[token.symbol] = token;
this._tokenAddressMap[token.address] = token;
});
}
  // returns a Tokens for a given list source and list type
  async getTokenList(): Promise<TokenInfo[]> {
    const tokens = await new TokenListProvider().resolve();
    return tokens.filterByClusterSlug(this._cluster).getList();
  }

  // returns the price of 1 lamport in SOL
  public get lamportPrice(): number {
    return this._lamportPrice;
  }

  // solana token lists are large. instead of reloading each time with
  // getTokenList, we can read the stored tokenList value from when the
  // object was initiated.
  public get storedTokenList(): TokenInfo[] {
    return this.tokenList;
  }

  // return the TokenInfo object for a symbol, or null if unknown
  getTokenForSymbol(symbol: string): TokenInfo | null {
    return this._tokenMap[symbol] ?? null;
  }
// return the TokenInfo object for a symbol
getTokenForMintAddress(mintAddress: PublicKey): TokenInfo | null {
return this._tokenAddressMap[mintAddress.toString()]
? this._tokenAddressMap[mintAddress.toString()]
: null;
}
  // returns Keypair for a private key, which should be encoded in Base58
  getKeypairFromPrivateKey(privateKey: string): Keypair {
    const decoded = bs58.decode(privateKey);
    return Keypair.fromSecretKey(decoded);
  }
  // Loads the wallet file for `address`, decodes its base58 binary fields,
  // and decrypts it with the configured passphrase into a Keypair.
  async getKeypair(address: string): Promise<Keypair> {
    const path = `${walletPath}/solana`;

    // The JSON reviver converts the base58-encoded binary fields back into
    // Uint8Arrays so WebCrypto can consume them directly.
    const encryptedPrivateKey: any = JSON.parse(
      await fse.readFile(`${path}/${address}.json`, 'utf8'),
      (key, value) => {
        switch (key) {
          case 'ciphertext':
          case 'salt':
          case 'iv':
            return bs58.decode(value);
          default:
            return value;
        }
      }
    );

    const passphrase = ConfigManagerCertPassphrase.readPassphrase();
    if (!passphrase) {
      throw new Error('missing passphrase');
    }
    return await this.decrypt(encryptedPrivateKey, passphrase);
  }
  // Imports the raw passphrase bytes as PBKDF2 key material.
  private static async getKeyMaterial(password: string) {
    const enc = new TextEncoder();
    return await crypto.subtle.importKey(
      'raw',
      enc.encode(password),
      'PBKDF2',
      false,
      ['deriveBits', 'deriveKey']
    );
  }

  // Derives a 256-bit AES-GCM key from the PBKDF2 key material and parameters.
  private static async getKey(
    keyAlgorithm: {
      salt: Uint8Array;
      name: string;
      iterations: number;
      hash: string;
    },
    keyMaterial: CryptoKey
  ) {
    return await crypto.subtle.deriveKey(
      keyAlgorithm,
      keyMaterial,
      { name: 'AES-GCM', length: 256 },
      true,
      ['encrypt', 'decrypt']
    );
  }
  // Takes a base58 encoded privateKey and saves it to a json
  // (AES-GCM with a PBKDF2/SHA-256-derived key; salt, iv and ciphertext are
  // base58-encoded in the serialized output so it round-trips through JSON).
  async encrypt(privateKey: string, password: string): Promise<string> {
    const iv = crypto.getRandomValues(new Uint8Array(16));
    const salt = crypto.getRandomValues(new Uint8Array(16));
    const keyMaterial = await Solana.getKeyMaterial(password);
    const keyAlgorithm = {
      name: 'PBKDF2',
      salt: salt,
      iterations: 500000,
      hash: 'SHA-256',
    };
    const key = await Solana.getKey(keyAlgorithm, keyMaterial);
    const cipherAlgorithm = {
      name: 'AES-GCM',
      iv: iv,
    };
    const enc = new TextEncoder();
    const ciphertext: ArrayBuffer = await crypto.subtle.encrypt(
      cipherAlgorithm,
      key,
      enc.encode(privateKey)
    );
    // The replacer mirrors the reviver in getKeypair: binary fields become
    // base58 strings in the stored JSON.
    return JSON.stringify(
      {
        keyAlgorithm,
        cipherAlgorithm,
        ciphertext: new Uint8Array(ciphertext),
      },
      (key, value) => {
        switch (key) {
          case 'ciphertext':
          case 'salt':
          case 'iv':
            return bs58.encode(value);
          default:
            return value;
        }
      }
    );
  }
async decrypt(encryptedPrivateKey: any, password: string): Promise<Keypair> {
logger.info(encryptedPrivateKey.keyAlgorithm.salt);
logger.info(encryptedPrivateKey.cipherAlgorithm.iv);
logger.info(encryptedPrivateKey.ciphertext);
const keyMaterial = await Solana.getKeyMaterial(password);
const key = await Solana.getKey(
encryptedPrivateKey.keyAlgorithm,
keyMaterial
);
const decrypted = await crypto.subtle.decrypt(
encryptedPrivateKey.cipherAlgorithm,
key,
encryptedPrivateKey.ciphertext
);
const dec = new TextDecoder();
dec.decode(decrypted);
return Keypair.fromSecretKey(bs58.decode(dec.decode(decrypted)));
}
  // Returns the wallet's native SOL balance plus every SPL token balance
  // whose mint is present in the loaded token registry.
  async getBalances(wallet: Keypair): Promise<Record<string, TokenValue>> {
    const balances: Record<string, TokenValue> = {};

    balances['SOL'] = await this.getSolBalance(wallet);

    const allSplTokens = await this.connection.getParsedTokenAccountsByOwner(
      wallet.publicKey,
      { programId: this._tokenProgramAddress }
    );

    allSplTokens.value.forEach(
      (tokenAccount: {
        pubkey: PublicKey;
        account: AccountInfo<ParsedAccountData>;
      }) => {
        const tokenInfo = tokenAccount.account.data.parsed['info'];
        // Tokens with an unknown mint (no registry entry) are skipped.
        const symbol = this.getTokenForMintAddress(tokenInfo['mint'])?.symbol;
        if (symbol != null)
          balances[symbol.toUpperCase()] = this.tokenResponseToTokenValue(
            tokenInfo['tokenAmount']
          );
      }
    );

    return balances;
  }

  // returns the SOL balance, convert BigNumber to string
  async getSolBalance(wallet: Keypair): Promise<TokenValue> {
    const lamports = await this.connection.getBalance(wallet.publicKey);
    return { value: BigNumber.from(lamports), decimals: this._lamportDecimals };
  }

  // Converts an RPC TokenAmount into the gateway's TokenValue shape.
  tokenResponseToTokenValue(account: TokenAmount): TokenValue {
    return {
      value: BigNumber.from(account.amount),
      decimals: account.decimals,
    };
  }
  // returns the balance for an SPL token
  // Throws if the wallet has no token account for the given mint.
  public async getSplBalance(
    walletAddress: PublicKey,
    mintAddress: PublicKey
  ): Promise<TokenValue> {
    const response = await this.connection.getParsedTokenAccountsByOwner(
      walletAddress,
      { mint: mintAddress }
    );
    if (response['value'].length == 0) {
      throw new Error(`Token account not initialized`);
    }
    // Use the first matching account's parsed amount.
    return this.tokenResponseToTokenValue(
      response.value[0].account.data.parsed['info']['tokenAmount']
    );
  }
// returns whether the token account is initialized, given its mint address
async isTokenAccountInitialized(
walletAddress: PublicKey,
mintAddress: PublicKey
): Promise<boolean> {
const response = await this.connection.getParsedTokenAccountsByOwner(
walletAddress,
{ programId: this._tokenProgramAddress }
);
for (const accountInfo of response.value) {
if (
accountInfo.account.data.parsed['info']['mint'] ==
mintAddress.toBase58()
)
return true;
}
return false;
}
// returns token account if is initialized, given its mint address
public async getTokenAccount(
walletAddress: PublicKey,
mintAddress: PublicKey
): Promise<{
pubkey: PublicKey;
account: AccountInfo<ParsedAccountData>;
} | null> {
const response = await this.connection.getParsedTokenAccountsByOwner(
walletAddress,
{ programId: this._tokenProgramAddress }
);
for (const accountInfo of response.value) {
if (
accountInfo.account.data.parsed['info']['mint'] ==
mintAddress.toBase58()
)
return accountInfo;
}
return null;
}
  // Gets token account information, or creates a new token account for given token mint address
  // if needed, which costs 0.035 SOL
  async getOrCreateAssociatedTokenAccount(
    wallet: Keypair,
    tokenAddress: PublicKey
  ): Promise<TokenAccount | null> {
    // The wallet is both the fee payer and the account owner.
    const tokenProgram = new TokenProgram(
      this._connection,
      tokenAddress,
      this._tokenProgramAddress,
      wallet
    );
    return await tokenProgram.getOrCreateAssociatedAccountInfo(
      wallet.publicKey
    );
  }
// returns an ethereum TransactionResponse for a txHash.
async getTransaction(
payerSignature: string
): Promise<TransactionResponse | null> {
if (this.cache.keys().includes(payerSignature)) {
// If it's in the cache, return the value in cache, whether it's null or not
return this.cache.get(payerSignature) as TransactionResponse;
} else {
// If it's not in the cache,
const fetchedTx = this._connection.getTransaction(payerSignature, {
commitment: 'confirmed',
});
this.cache.set(payerSignature, fetchedTx); // Cache the fetched receipt, whether it's null or not
return fetchedTx;
}
}
// returns an ethereum TransactionResponseStatusCode for a txData.
public async getTransactionStatusCode(
txData: TransactionResponse | null
): Promise<TransactionResponseStatusCode> {
let txStatus;
if (!txData) {
// tx not found, didn't reach the mempool or it never existed
txStatus = TransactionResponseStatusCode.FAILED;
} else {
txStatus =
txData.meta?.err == null
? TransactionResponseStatusCode.CONFIRMED
: TransactionResponseStatusCode.FAILED;
// TODO implement TransactionResponseStatusCode PROCESSED, FINALISED,
// based on how many blocks ago the Transaction was
}
return txStatus;
}
// caches transaction receipt once they arrive
cacheTransactionReceipt(tx: TransactionResponse) {
// first (payer) signature is used as cache key since it is unique enough
this.cache.set(tx.transaction.signatures[0], tx);
}
public getTokenBySymbol(tokenSymbol: string): TokenInfo | undefined {
return this.tokenList.find(
(token: TokenInfo) =>
token.symbol.toUpperCase() === tokenSymbol.toUpperCase()
);
}
  // returns the current slot number
  // (no commitment argument is passed, so the connection's default applies)
  async getCurrentSlotNumber(): Promise<number> {
    return await this._connection.getSlot();
  }
  // Increments the outbound-request counter whenever the observed message
  // reports action === 'request'. The counter is read and reset by
  // metricLogger().
  public requestCounter(msg: any): void {
    if (msg.action === 'request') this._requestCount += 1;
  }
public metricLogger(): void {
logger.info(
this.requestCount +
' request(s) sent in last ' +
this.metricsLogInterval / 1000 +
' seconds.'
);
this._requestCount = 0; // reset
}
  // The cluster identifier this connector was configured with.
  public get cluster(): string {
    return this._cluster;
  }
  // Symbol of the chain's native token.
  public get nativeTokenSymbol(): string {
    return this._nativeTokenSymbol;
  }
  // Requests counted since the last metricLogger() reset.
  public get requestCount(): number {
    return this._requestCount;
  }
  // Metrics logging interval in milliseconds (metricLogger divides by 1000
  // when reporting seconds).
  public get metricsLogInterval(): number {
    return this._metricsLogInterval;
  }
  // returns the current block number
  // NOTE(review): this queries the slot at 'processed' commitment via
  // this.connection, while getCurrentSlotNumber() uses this._connection with
  // the default commitment — confirm the difference is intentional.
  async getCurrentBlockNumber(): Promise<number> {
    return await this.connection.getSlot('processed');
  }
} | the_stack |
import * as React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import ProjectEnvironmentUnlockService from '../../services/ProjectEnvironmentUnlock';
import ProjectEnvironmentService from '../../services/ProjectEnvironment';
import { fetchProject } from '../../state/project/actions';
import Alert from '../../components/Alert';
import AlertErrorValidation from '../../components/AlertErrorValidation';
import Button from '../../components/Button';
import Container from '../../components/Container';
import Grid from '../../components/Grid';
import Loader from '../../components/Loader';
import Panel from '../../components/Panel';
import PanelBody from '../../components/PanelBody';
import TextField from '../../components/TextField';
import EnvironmentServersTable from './components/EnvironmentServersTable';
import { buildAlertFromResponse } from '../../utils/alert';
import Layout from "../../components/Layout";
import ProjectHeading from '../../components/ProjectHeading/ProjectHeading';
import ProjectServerApi from "../../services/Api/ProjectServerApi";
import { createToast } from "../../state/alert/alertActions";
import {fetchMe} from "../../state/auth/authActions";
class ProjectEnvironmentUnlockPage extends React.Component<any, any> {
constructor(props) {
super(props);
this.state = {
environment: {
key: '',
contents: '',
servers: [],
},
projectServers: [],
errors: [],
status: {},
unlocked: false
};
this.handleInputChange = this.handleInputChange.bind(this);
this.handleClick = this.handleClick.bind(this);
this.handleUpdateClick = this.handleUpdateClick.bind(this);
this.handleCancelClick = this.handleCancelClick.bind(this);
this.handleSyncServerClick = this.handleSyncServerClick.bind(this);
}
/**
* Fetch project through dispatch during componentWillMount cycle.
* Listen for environment updates when component has mounted.
*/
componentDidMount() {
const { dispatch, project, match } = this.props;
const projectId = match.params.project_id;
dispatch(fetchProject(projectId));
dispatch(fetchMe());
this.setProjectServers(projectId);
this.setEnvironment(project);
}
/**
* Update state when component props update.
*/
componentWillReceiveProps(nextProps: any): void {
const { auth } = this.props;
if (nextProps.auth.user !== auth.user && nextProps.auth.user) {
const user = nextProps.auth.user;
this.listenForEvents(user.id);
}
}
/**
* Listen for Echo related events.
*/
listenForEvents = (userId: number): void => {
const echoWindow: any = window;
const Echo = echoWindow.Echo;
if (Echo !== null) {
Echo.private(`user.${ userId }`)
.listen('.Deploy\\Events\\EnvironmentSyncing', (e) => {
const serverId = e.serverId;
const serverStatus = e.status;
this.setState((prevState) => {
const status = {
...prevState.status,
[serverId]: serverStatus
};
return { status: status };
});
})
.listen('.Deploy\\Events\\EnvironmentSynced', (e) => {
const serverId = e.serverId;
const serverStatus = e.status;
this.setState((prevState) => {
const status = {
...prevState.status,
[serverId]: serverStatus
};
return { status: status };
});
});
}
}
/**
* Setup environment.
*/
setEnvironment = (project: any) => {
this.setState((prevState) => {
const projectServers = project.item.environment_servers;
const servers = projectServers.map((server) => {
return server.server_id;
}, []);
const environment = {
...prevState.environment,
servers: servers,
};
return { environment: environment };
});
}
/**
* Setup project servers.
*/
setProjectServers = (projectId: number) => {
const projectServerApi = new ProjectServerApi();
projectServerApi.list(projectId)
.then((response) => {
this.setState({ projectServers: response.data })
})
.catch((error) => {
console.log(error.response);
})
}
/**
* Handle input change.
*/
handleInputChange = (event): void => {
const value = event.target.value;
const name = event.target.name;
this.setState((prevState) => {
let environment = {
...prevState.environment,
[name]: value
};
return { environment: environment }
});
}
/**
* Handle environment unlock click.
*/
handleClick = (): void => {
const { environment } = this.state;
const { project } = this.props;
const projectEnvironmentUnlockService = new ProjectEnvironmentUnlockService;
projectEnvironmentUnlockService
.post(project.item.id, environment)
.then((response) => {
this.setState({errors: []});
this.setState((state) => {
let environment = Object.assign({}, state.environment, response.data);
return {
environment: environment,
unlocked: true
}
});
},
(error) => {
this.setState({ errors: buildAlertFromResponse(error.response) });
});
}
/**
* Handle environment update click.
*/
handleUpdateClick = (): void => {
const { project, dispatch } = this.props;
const { environment } = this.state;
const projectEnvironmentService = new ProjectEnvironmentService;
projectEnvironmentService
.put(project.item.id, environment)
.then((response) => {
this.setState({ errors: [] });
dispatch(createToast('Updated environment successfully.'));
},
(error) => {
this.setState({ errors: buildAlertFromResponse(error.response) });
});
}
/**
* Handle cancel environment update click.
*/
handleCancelClick = (): void => {
this.setState((prevState) => ({
...prevState,
environment: {
...prevState.environment,
key: null,
contents: null
},
unlocked: false
}));
}
/**
* Handle click for adding to servers that should be synced.
*/
handleSyncServerClick = (serverId: number): void => {
this.setState((prevState) => {
let environmentServers = prevState.environment.servers;
if (environmentServers.indexOf(serverId) === -1) {
environmentServers.push(serverId);
} else {
environmentServers = environmentServers.filter((environmentServer) => {
return environmentServer !== serverId;
});
}
const environment = {
...prevState.environment,
servers: environmentServers,
};
return { environment: environment };
});
}
/**
* Map environment servers.
*/
mapEnvironmentServers = (environmentServers: any[]): any[] => {
return (environmentServers || []).map(environmentServer => {
return parseInt(environmentServer.server_id);
});
}
/**
* Render component.
*/
render() {
const { environment, errors, status, unlocked, projectServers } = this.state;
const { project } = this.props;
if (project.isFetching) {
return (
<Layout project={ project.item }>
<ProjectHeading project={ project.item } />
<div className="content">
<Container fluid>
<Loader />
</Container>
</div>
</Layout>
)
}
if (unlocked) {
return (
<Layout project={project.item}>
<ProjectHeading project={ project.item } />
<div className="content">
<Container fluid>
<Alert type="warning">
Your environment information will be stored in an .env file on your servers.
</Alert>
<div className="row">
<Grid xs={12} md={8}>
<Panel>
<PanelBody>
{errors.length ? <AlertErrorValidation errors={errors} /> : ''}
<TextField
label="Key"
name="key"
type="password"
onChange={this.handleInputChange}
value={environment.key}
/>
<div className="form-group">
<label>Contents</label>
<textarea
className="form-control"
name="contents"
onChange={this.handleInputChange}
rows={ 6 }
style={{fontFamily: 'monospace', resize: 'vertical'}}
defaultValue={environment.contents}
/>
</div>
<Button
onClick={this.handleCancelClick}
style={{marginRight: 5}}
>Cancel</Button>
<Button
onClick={this.handleUpdateClick}
>Update Environment</Button>
</PanelBody>
</Panel>
</Grid>
<Grid xs={12} md={4}>
<EnvironmentServersTable
projectServers={ projectServers }
syncedServers={ environment.servers }
status={ status }
onSyncServerClick={this.handleSyncServerClick}
/>
</Grid>
</div>
</Container>
</div>
</Layout>
)
}
return (
<Layout project={project.item}>
<ProjectHeading project={ project.item } />
<div className="content">
<Container fluid>
<Alert type="warning">
Your environment information will be encrypted on our server using your
chosen key. You will also have to provide your key each time you wish to
update your information.
<br/><br/>
Please keep in mind that we do not store your key and have no way of
retrieving it. Therefore if you forget your key, you will need to reset your key which will also
result in any previous encrypted environment information being cleared
from our server.
</Alert>
<Panel>
<PanelBody>
{errors.length ? <AlertErrorValidation errors={errors} /> : ''}
<div className="form-group">
<TextField
label="Key"
name="key"
type="password"
onChange={this.handleInputChange}
value={environment.key}
/>
</div>
<div className="form-group">
<Button
onClick={this.handleClick}
>Unlock Environment</Button>
</div>
<Link
to={'/projects/' + project.item.id + '/environment-reset'}
>Need to reset your key?</Link>
</PanelBody>
</Panel>
</Container>
</div>
</Layout>
)
}
}
// Expose only the project and auth slices of the store to the component.
const mapStateToProps = (state) => ({
  project: state.project,
  auth: state.auth,
});
export default connect(
mapStateToProps
)(ProjectEnvironmentUnlockPage); | the_stack |
import * as _ from 'lodash';
import {
add_dummy_feature,
Binarizer,
MinMaxScaler,
normalize,
OneHotEncoder,
PolynomialFeatures,
} from '../../src/lib/preprocessing';
import {
ConstructionError,
Validation1DMatrixError,
ValidationError,
ValidationKeyNotFoundError,
} from '../../src/lib/utils/Errors';
describe('data:add_dummy_feature', () => {
  const X1 = [[0, 1], [1, 0]];
  const X2 = [[0, 1, 2], [1, 0, 3]];
  it('should return correct result for X1 with default value', () => {
    const expectedResult = [[1, 0, 1], [1, 1, 0]];
    const result = add_dummy_feature(X1);
    expect(result).toEqual(expectedResult);
  });
  it('should return correct result for X2 with default value', () => {
    const expectedResult = [[1, 0, 1, 2], [1, 1, 0, 3]];
    const result = add_dummy_feature(X2);
    expect(result).toEqual(expectedResult);
  });
  it('should return correct result for X1 with value 2', () => {
    const expectedResult = [[2, 0, 1], [2, 1, 0]];
    const result = add_dummy_feature(X1, 2);
    expect(result).toEqual(expectedResult);
  });
  it('should throw error when invalid data is given', () => {
    // expect(...).toThrow asserts that the call actually throws; the
    // previous try/catch pattern passed silently when no error was raised.
    expect(() => add_dummy_feature(true as any)).toThrow(ValidationError);
    expect(() => add_dummy_feature(1 as any)).toThrow(ValidationError);
    expect(() => add_dummy_feature(null)).toThrow(ValidationError);
    expect(() => add_dummy_feature(undefined)).toThrow(ValidationError);
  });
});
describe('data:OneHotEncoder', () => {
  // Dataset for OneHotEncoding
  const planetList = [
    { planet: 'mars', isGasGiant: false, value: 10 },
    { planet: 'saturn', isGasGiant: true, value: 20 },
    { planet: 'jupiter', isGasGiant: true, value: 30 },
  ];
  it('should encode planet list correctly', () => {
    const enc = new OneHotEncoder();
    const expectedEncode = [[-1, 0, 1, 0, 0], [0, 1, 0, 1, 0], [1, 1, 0, 0, 1]];
    const encodeInfo = enc.encode(planetList, {
      dataKeys: ['value', 'isGasGiant'],
      labelKeys: ['planet'],
    });
    expect(encodeInfo.data).toEqual(expectedEncode);
  });
  it('should decode planet list correctly', () => {
    const enc = new OneHotEncoder();
    const encodeInfo = enc.encode(planetList, {
      dataKeys: ['value', 'isGasGiant'],
      labelKeys: ['planet'],
    });
    const decodedInfo = enc.decode(encodeInfo.data, encodeInfo.decoders);
    // actual first, expected second (was reversed before)
    expect(decodedInfo).toEqual(planetList);
  });
  it("Invalid data key 'values' should throw an Error", () => {
    const enc = new OneHotEncoder();
    // expect(...).toThrow asserts the call actually throws; the previous
    // try/catch pattern passed silently when no error was raised.
    expect(() =>
      enc.encode(planetList, {
        dataKeys: ['values'],
        labelKeys: ['planet'],
      })
    ).toThrow(ValidationKeyNotFoundError);
  });
  it("Invalid label key 'planot' should throw an Error", () => {
    const enc = new OneHotEncoder();
    expect(() =>
      enc.encode(planetList, {
        dataKeys: ['value'],
        labelKeys: ['planot'],
      })
    ).toThrow(ValidationKeyNotFoundError);
  });
});
describe('data:MinMaxScaler', () => {
  // 5x11 sample matrix used to fit the scaler in the matrix tests below.
  const matrix1 = [
    [7, 0.27, 0.36, 20.7, 0.045, 45, 170, 1.001, 3, 0.45, 8.8],
    [6.3, 0.3, 0.34, 1.6, 0.049, 14, 132, 0.994, 3.3, 0.49, 9.5],
    [8.1, 0.28, 0.4, 6.9, 0.05, 30, 97, 0.9951, 3.26, 0.44, 10.1],
    [7.2, 0.23, 0.32, 8.5, 0.058, 47, 186, 0.9956, 3.19, 0.4, 9.9],
    [7.2, 0.23, 0.32, 8.5, 0.058, 47, 186, 0.9956, 3.19, 0.4, 9.9],
  ];
  it('should feature range [0, 1] of [4, 5, 6] return [0, 0.5, 1]', () => {
    const expectedResult = [0, 0.5, 1];
    const minmaxScaler = new MinMaxScaler({ featureRange: [0, 1] });
    minmaxScaler.fit([4, 5, 6]);
    const result = minmaxScaler.fit_transform([4, 5, 6]);
    // toEqual gives better failure output than _.isEqual(...) === true and
    // takes actual first, expected second (was reversed before).
    expect(result).toEqual(expectedResult);
  });
  it('should feature range [0, 100] of [4, 5, 6] return [0, 50, 100]', () => {
    const expectedResult = [0, 50, 100];
    const minmaxScaler = new MinMaxScaler({ featureRange: [0, 100] });
    minmaxScaler.fit([4, 5, 6]);
    const result = minmaxScaler.fit_transform([4, 5, 6]);
    expect(result).toEqual(expectedResult);
  });
  it('should feature range [-100, 100] of [4, 5, 6] return [ -100, 0, 100 ]', () => {
    const expectedResult = [-100, 0, 100];
    const minmaxScaler = new MinMaxScaler({ featureRange: [-100, 100] });
    minmaxScaler.fit([4, 5, 6]);
    const result = minmaxScaler.fit_transform([4, 5, 6]);
    expect(result).toEqual(expectedResult);
  });
  it('matrix dataset test1', () => {
    const expected = [0.005135651088817423, 0.01051329621806706, 0.015890941347316695];
    const scaler = new MinMaxScaler({ featureRange: [0, 1] });
    scaler.fit(matrix1);
    const result = scaler.transform([1, 2, 3]);
    expect(result).toEqual(expected);
  });
  it('should transform matrix1 then successfully inverse tranform', () => {
    const expected = [0.005135651088817423, 0.01051329621806706, 0.015890941347316695];
    const scaler = new MinMaxScaler({ featureRange: [0, 1] });
    scaler.fit(matrix1);
    const data = [1, 2, 3];
    const transformed = scaler.transform(data);
    expect(transformed).toEqual(expected);
    const result = scaler.inverse_transform(transformed);
    expect(result).toEqual(data);
  });
  it('should not fit invalid inputs', () => {
    const scaler = new MinMaxScaler({ featureRange: [0, 1] });
    // expect(...).toThrow asserts the call actually throws; the previous
    // try/catch pattern passed silently when no error was raised.
    expect(() => scaler.fit('?' as any)).toThrow(ValidationError);
    expect(() => scaler.fit(1 as any)).toThrow(ValidationError);
    expect(() => scaler.fit([] as any)).toThrow(ValidationError);
  });
  it('should not fit_transform invalid inputs', () => {
    const scaler = new MinMaxScaler({ featureRange: [0, 1] });
    expect(() => scaler.fit_transform('?' as any)).toThrow(ValidationError);
    expect(() => scaler.fit_transform(1 as any)).toThrow(ValidationError);
    expect(() => scaler.fit_transform([] as any)).toThrow(ValidationError);
  });
  it('should not inverse_transform invalid inputs', () => {
    const scaler = new MinMaxScaler({ featureRange: [0, 1] });
    expect(() => scaler.inverse_transform('?' as any)).toThrow(ValidationError);
    expect(() => scaler.inverse_transform(1 as any)).toThrow(ValidationError);
    expect(() => scaler.inverse_transform([] as any)).toThrow(Validation1DMatrixError);
  });
});
describe('data:Binarizer', () => {
  it('Should [[1, -1, 2], [2, 0, 0], [0, 1, -1]] return [[ 1, 0, 1 ], [ 1, 0, 0 ], [ 0, 1, 0 ]]', () => {
    const binX = [[1, -1, 2], [2, 0, 0], [0, 1, -1]];
    const expected = [[1, 0, 1], [1, 0, 0], [0, 1, 0]];
    const newBin = new Binarizer({ threshold: 0 });
    const binResult = newBin.transform(binX);
    // toEqual gives better failure output than _.isEqual(...) === true
    expect(binResult).toEqual(expected);
  });
  it('should not fit invalid data', () => {
    const newBin = new Binarizer({ threshold: 0 });
    // expect(...).toThrow asserts the call actually throws; the previous
    // try/catch pattern passed silently when no error was raised.
    expect(() => newBin.fit([])).toThrow(ValidationError);
    expect(() => newBin.fit('?' as any)).toThrow(ValidationError);
    expect(() => newBin.fit(null)).toThrow(ValidationError);
  });
  it('should not transform invalid data', () => {
    const newBin = new Binarizer({ threshold: 0 });
    expect(() => newBin.transform([])).toThrow(ValidationError);
    expect(() => newBin.transform('?' as any)).toThrow(ValidationError);
    expect(() => newBin.transform(null)).toThrow(ValidationError);
  });
});
describe('data:PolynomialFeatures', () => {
  const X1 = [[0, 1], [2, 3], [4, 5]];
  // Functionalities
  it('should transform X1 with default degree value', () => {
    const poly = new PolynomialFeatures();
    const result = poly.transform(X1);
    const expected = [[1, 0, 1, 0, 0, 1], [1, 2, 3, 4, 6, 9], [1, 4, 5, 16, 20, 25]];
    expect(result).toEqual(expected);
  });
  it('should transform X1 with degree value 3', () => {
    const poly = new PolynomialFeatures({ degree: 3 });
    const result = poly.transform(X1);
    const expected = [
      [1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1],
      [1, 2, 3, 4, 6, 9, 8, 12, 12, 18, 18, 27],
      [1, 4, 5, 16, 20, 25, 64, 80, 80, 100, 100, 125],
    ];
    expect(result).toEqual(expected);
  });
  // Exceptions
  it('should not transform when invalid values are given', () => {
    const poly = new PolynomialFeatures();
    // expect(...).toThrow asserts the call actually throws; the previous
    // try/catch pattern passed silently when no error was raised.
    expect(() => poly.transform(null)).toThrow(ValidationError);
    expect(() => poly.transform([])).toThrow(ValidationError);
    expect(() => poly.transform(1 as any)).toThrow(ValidationError);
    expect(() => poly.transform('string' as any)).toThrow(ValidationError);
  });
  // TODO: Implement matrix data type check in validateMatrixXX and reimplement the test
  /* it('should not transform when matrix with non numeric value is given', () => {
    const poly = new PolynomialFeatures();
    const X = [[1, 2, true], [2, 1, 'string'], [null, null, null]];
    console.log(poly.transform(X));
    expect(() => poly.transform(X)).toThrow('X has to be a matrix of numbers');
  }); */
  it('should not initiate the class if an invalid degree value is given', () => {
    // Wrap both construction and transform so the assertion holds no matter
    // which of the two raises the ConstructionError.
    expect(() => {
      const poly = new PolynomialFeatures({ degree: null });
      poly.transform();
    }).toThrow(ConstructionError);
    expect(() => {
      const poly = new PolynomialFeatures({ degree: 'string' });
      poly.transform();
    }).toThrow(ConstructionError);
    expect(() => {
      const poly = new PolynomialFeatures({ degree: [] });
      poly.transform();
    }).toThrow(ConstructionError);
  });
});
describe('data:normalize', () => {
const X1 = [[1, -1, 2], [2, 0, 0], [0, 1, -1]];
it('should normalize X1 with l2 norm', () => {
const expected = [
[0.4082482904638631, -0.4082482904638631, 0.8164965809277261],
[1, 0, 0],
[0, 0.7071067811865475, -0.7071067811865475],
];
const result = normalize(X1, { norm: 'l2' });
expect(result).toEqual(expected);
});
it('should normalize X1 with l1 norm', () => {
const expected = [[0.25, -0.25, 0.5], [1, 0, 0], [0, 0.5, -0.5]];
const result = normalize(X1, { norm: 'l1' });
expect(result).toEqual(expected);
});
it('should throw an error if unrecognised norm is passed in', () => {
try {
normalize(X1, { norm: 'test' });
} catch (err) {
expect(err).toBeInstanceOf(ValidationError);
}
});
it('should throw an error if the input is invalid', () => {
try {
normalize(null, { norm: 'l1' });
} catch (err) {
expect(err).toBeInstanceOf(ValidationError);
}
try {
normalize([], { norm: 'l1' });
} catch (err) {
expect(err).toBeInstanceOf(ValidationError);
}
try {
normalize('string' as any, { norm: 'l1' });
} catch (err) {
expect(err).toBeInstanceOf(ValidationError);
}
});
}); | the_stack |
'use strict';
import * as _ from 'lodash';
import * as jwt from 'jsonwebtoken';
import * as chai from 'chai';
import * as sinon from 'sinon';
import * as sinonChai from 'sinon-chai';
import * as chaiAsPromised from 'chai-as-promised';
import * as mocks from '../../resources/mocks';
import {
appCheckErrorFromCryptoSignerError,
AppCheckTokenGenerator
} from '../../../src/app-check/token-generator';
import {
CryptoSignerError, CryptoSignerErrorCode, ServiceAccountSigner
} from '../../../src/utils/crypto-signer';
import { ServiceAccountCredential } from '../../../src/app/credential-internal';
import { FirebaseAppCheckError } from '../../../src/app-check/app-check-api-client-internal';
import * as utils from '../utils';
// Enable chai's should-style assertions plus the sinon and promise plugins
// used throughout this suite.
chai.should();
chai.use(sinonChai);
chai.use(chaiAsPromised);
const expect = chai.expect;
// Signing algorithm the token generator is expected to use.
const ALGORITHM = 'RS256';
// Default token lifetime asserted in the payload tests below.
const FIVE_MIN_IN_SECONDS = 5 * 60;
const FIREBASE_APP_CHECK_AUDIENCE = 'https://firebaseappcheck.googleapis.com/google.firebase.appcheck.v1beta.TokenExchangeService';
/**
 * Verifies a token is signed with the private key corresponding to the
 * provided public key.
 *
 * Wraps jwt.verify's callback API in a promise; only RS256 is accepted,
 * matching the algorithm this suite expects in the token header.
 *
 * @param token The token to verify.
 * @param publicKey The public key to use to verify the token.
 * @returns A promise fulfilled with the decoded token if it is valid;
 *   otherwise, a rejected promise.
 */
function verifyToken(token: string, publicKey: string): Promise<object> {
  return new Promise((resolve, reject) => {
    jwt.verify(token, publicKey, {
      algorithms: [ALGORITHM],
    }, (err, res) => {
      if (err) {
        reject(err);
      } else {
        resolve(res as object);
      }
    });
  });
}
describe('AppCheckTokenGenerator', () => {
  const cert = new ServiceAccountCredential(mocks.certificateObject);
  const APP_ID = 'test-app-id';
  let clock: sinon.SinonFakeTimers | undefined;
  // Restore real timers after each test so a fake clock never leaks into
  // the next test.
  afterEach(() => {
    if (clock) {
      clock.restore();
      clock = undefined;
    }
  });
  describe('Constructor', () => {
    it('should throw given no arguments', () => {
      expect(() => {
        // Need to overcome the type system to allow a call with no parameter
        const anyFirebaseAppCheckTokenGenerator: any = AppCheckTokenGenerator;
        return new anyFirebaseAppCheckTokenGenerator();
      }).to.throw('Must provide a CryptoSigner to use AppCheckTokenGenerator');
    });
  });
  // Values that are not CryptoSigner instances; the constructor must reject
  // each of them.
  const invalidSigners: any[] = [null, NaN, 0, 1, true, false, '', 'a', [], _.noop];
  invalidSigners.forEach((invalidSigner) => {
    it('should throw given invalid signer: ' + JSON.stringify(invalidSigner), () => {
      expect(() => {
        return new AppCheckTokenGenerator(invalidSigner as any);
      }).to.throw('Must provide a CryptoSigner to use AppCheckTokenGenerator');
    });
  });
  describe('createCustomToken()', () => {
    const tokenGenerator = new AppCheckTokenGenerator(new ServiceAccountSigner(cert));
    it('should throw given no appId', () => {
      expect(() => {
        (tokenGenerator as any).createCustomToken();
      }).to.throw(FirebaseAppCheckError).with.property('code', 'app-check/invalid-argument');
    });
    const invalidAppIds = [null, NaN, 0, 1, true, false, [], {}, { a: 1 }, _.noop];
    invalidAppIds.forEach((invalidAppId) => {
      it('should throw given a non-string appId: ' + JSON.stringify(invalidAppId), () => {
        expect(() => {
          tokenGenerator.createCustomToken(invalidAppId as any);
        }).to.throw(FirebaseAppCheckError).with.property('code', 'app-check/invalid-argument');
      });
    });
    it('should throw given an empty string appId', () => {
      expect(() => {
        tokenGenerator.createCustomToken('');
      }).to.throw(FirebaseAppCheckError).with.property('code', 'app-check/invalid-argument');
    });
    const invalidOptions = [null, NaN, 0, 1, true, false, [], _.noop];
    invalidOptions.forEach((invalidOption) => {
      it('should throw given an invalid options: ' + JSON.stringify(invalidOption), () => {
        expect(() => {
          tokenGenerator.createCustomToken(APP_ID, invalidOption as any);
        }).to.throw(FirebaseAppCheckError).with.property('message', 'AppCheckTokenOptions must be a non-null object.');
      });
    });
    const invalidTtls = [null, NaN, '0', 'abc', '', true, false, [], {}, { a: 1 }, _.noop];
    invalidTtls.forEach((invalidTtl) => {
      it('should throw given an options object with invalid ttl: ' + JSON.stringify(invalidTtl), () => {
        expect(() => {
          tokenGenerator.createCustomToken(APP_ID, { ttlMillis: invalidTtl as any });
        }).to.throw(FirebaseAppCheckError).with.property('message',
          'ttlMillis must be a duration in milliseconds.');
      });
    });
    const THIRTY_MIN_IN_MS = 1800000;
    const SEVEN_DAYS_IN_MS = 604800000;
    // ttlMillis must lie within [30 minutes, 7 days] inclusive; each value
    // below is outside that range.
    [-100, -1, 0, 10, THIRTY_MIN_IN_MS - 1, SEVEN_DAYS_IN_MS + 1, SEVEN_DAYS_IN_MS * 2].forEach((ttlMillis) => {
      it('should throw given options with ttl < 30 minutes or ttl > 7 days:' + JSON.stringify(ttlMillis), () => {
        expect(() => {
          tokenGenerator.createCustomToken(APP_ID, { ttlMillis });
        }).to.throw(FirebaseAppCheckError).with.property(
          'message', 'ttlMillis must be a duration in milliseconds between 30 minutes and 7 days (inclusive).');
      });
    });
    it('should be fulfilled with a Firebase Custom JWT with only an APP ID', () => {
      return tokenGenerator.createCustomToken(APP_ID)
        .should.eventually.be.a('string').and.not.be.empty;
    });
    // Pairs of [input ttlMillis, expected 'ttl' claim string].
    [
      [THIRTY_MIN_IN_MS, '1800s'],
      [THIRTY_MIN_IN_MS + 1, '1800.001000000s'],
      [SEVEN_DAYS_IN_MS / 2, '302400s'],
      [SEVEN_DAYS_IN_MS - 1, '604799.999000000s'],
      [SEVEN_DAYS_IN_MS, '604800s']
    ].forEach((ttl) => {
      it('should be fulfilled with a Firebase Custom JWT with a valid custom ttl' + JSON.stringify(ttl[0]), () => {
        return tokenGenerator.createCustomToken(APP_ID, { ttlMillis: ttl[0] as number })
          .then((token) => {
            const decoded = jwt.decode(token) as { [key: string]: any };
            expect(decoded['ttl']).to.equal(ttl[1]);
          });
      });
    });
    it('should be fulfilled with a JWT with the correct decoded payload', () => {
      // Freeze time so iat/exp are deterministic.
      clock = sinon.useFakeTimers(1000);
      return tokenGenerator.createCustomToken(APP_ID)
        .then((token) => {
          const decoded = jwt.decode(token);
          const expected: { [key: string]: any } = {
            // eslint-disable-next-line @typescript-eslint/camelcase
            app_id: APP_ID,
            iat: 1,
            exp: FIVE_MIN_IN_SECONDS + 1,
            aud: FIREBASE_APP_CHECK_AUDIENCE,
            iss: mocks.certificateObject.client_email,
            sub: mocks.certificateObject.client_email,
          };
          expect(decoded).to.deep.equal(expected);
        });
    });
    [{}, { ttlMillis: undefined }, { a: 123 }].forEach((options) => {
      it('should be fulfilled with no ttl in the decoded payload when ttl is not provided in options', () => {
        clock = sinon.useFakeTimers(1000);
        return tokenGenerator.createCustomToken(APP_ID, options)
          .then((token) => {
            const decoded = jwt.decode(token);
            const expected: { [key: string]: any } = {
              // eslint-disable-next-line @typescript-eslint/camelcase
              app_id: APP_ID,
              iat: 1,
              exp: FIVE_MIN_IN_SECONDS + 1,
              aud: FIREBASE_APP_CHECK_AUDIENCE,
              iss: mocks.certificateObject.client_email,
              sub: mocks.certificateObject.client_email,
            };
            expect(decoded).to.deep.equal(expected);
          });
      });
    });
    [
      [1800000.000001, '1800.000000001s'],
      [1800000.001, '1800.000000999s'],
      [172800000, '172800s'],
      [604799999, '604799.999000000s'],
      [604800000, '604800s']
    ].forEach((ttl) => {
      it('should be fulfilled with a JWT with custom ttl in decoded payload', () => {
        clock = sinon.useFakeTimers(1000);
        return tokenGenerator.createCustomToken(APP_ID, { ttlMillis: ttl[0] as number })
          .then((token) => {
            const decoded = jwt.decode(token);
            const expected: { [key: string]: any } = {
              // eslint-disable-next-line @typescript-eslint/camelcase
              app_id: APP_ID,
              iat: 1,
              exp: FIVE_MIN_IN_SECONDS + 1,
              aud: FIREBASE_APP_CHECK_AUDIENCE,
              iss: mocks.certificateObject.client_email,
              sub: mocks.certificateObject.client_email,
              ttl: ttl[1],
            };
            expect(decoded).to.deep.equal(expected);
          });
      });
    });
    it('should be fulfilled with a JWT with the correct header', () => {
      clock = sinon.useFakeTimers(1000);
      return tokenGenerator.createCustomToken(APP_ID)
        .then((token) => {
          const decoded: any = jwt.decode(token, {
            complete: true,
          });
          expect(decoded.header).to.deep.equal({
            alg: ALGORITHM,
            typ: 'JWT',
          });
        });
    });
    it('should be fulfilled with a JWT which can be verified by the service account public key', () => {
      return tokenGenerator.createCustomToken(APP_ID)
        .then((token) => {
          return verifyToken(token, mocks.keyPairs[0].public);
        });
    });
    it('should be fulfilled with a JWT which cannot be verified by a random public key', () => {
      return tokenGenerator.createCustomToken(APP_ID)
        .then((token) => {
          return verifyToken(token, mocks.keyPairs[1].public)
            .should.eventually.be.rejectedWith('invalid signature');
        });
    });
    it('should be fulfilled with a JWT which expires after five minutes', () => {
      clock = sinon.useFakeTimers(1000);
      let token: string;
      return tokenGenerator.createCustomToken(APP_ID)
        .then((result) => {
          token = result;
          clock!.tick((FIVE_MIN_IN_SECONDS * 1000) - 1);
          // Token should still be valid
          return verifyToken(token, mocks.keyPairs[0].public);
        })
        .then(() => {
          clock!.tick(1);
          // Token should now be invalid
          return verifyToken(token, mocks.keyPairs[0].public)
            .should.eventually.be.rejectedWith('jwt expired');
        });
    });
    // NOTE(review): this describe tests a standalone helper, yet it is
    // nested inside describe('createCustomToken()'); consider hoisting it to
    // the top level for clearer test organization.
    describe('appCheckErrorFromCryptoSignerError', () => {
      it('should convert CryptoSignerError to FirebaseAppCheckError', () => {
        const cryptoError = new CryptoSignerError({
          code: CryptoSignerErrorCode.INVALID_ARGUMENT,
          message: 'test error.',
        });
        const appCheckError = appCheckErrorFromCryptoSignerError(cryptoError);
        expect(appCheckError).to.be.an.instanceof(FirebaseAppCheckError);
        expect(appCheckError).to.have.property('code', 'app-check/invalid-argument');
        expect(appCheckError).to.have.property('message', 'test error.');
      });
      it('should convert CryptoSignerError HttpError to FirebaseAppCheckError', () => {
        const cryptoError = new CryptoSignerError({
          code: CryptoSignerErrorCode.SERVER_ERROR,
          message: 'test error.',
          cause: utils.errorFrom({
            error: {
              message: 'server error.',
            },
          })
        });
        const appCheckError = appCheckErrorFromCryptoSignerError(cryptoError);
        expect(appCheckError).to.be.an.instanceof(FirebaseAppCheckError);
        expect(appCheckError).to.have.property('code', 'app-check/unknown-error');
        expect(appCheckError).to.have.property('message',
          'Error returned from server while signing a custom token: server error.');
      });
      it('should convert CryptoSignerError HttpError with no error.message to FirebaseAppCheckError', () => {
        const cryptoError = new CryptoSignerError({
          code: CryptoSignerErrorCode.SERVER_ERROR,
          message: 'test error.',
          cause: utils.errorFrom({
            error: {},
          })
        });
        const appCheckError = appCheckErrorFromCryptoSignerError(cryptoError);
        expect(appCheckError).to.be.an.instanceof(FirebaseAppCheckError);
        expect(appCheckError).to.have.property('code', 'app-check/unknown-error');
        expect(appCheckError).to.have.property('message',
          'Error returned from server while signing a custom token: '+
          '{"status":500,"headers":{},"data":{"error":{}},"text":"{\\"error\\":{}}"}');
      });
      it('should convert CryptoSignerError HttpError with no errorcode to FirebaseAppCheckError', () => {
        const cryptoError = new CryptoSignerError({
          code: CryptoSignerErrorCode.SERVER_ERROR,
          message: 'test error.',
          cause: utils.errorFrom('server error.')
        });
        const appCheckError = appCheckErrorFromCryptoSignerError(cryptoError);
        expect(appCheckError).to.be.an.instanceof(FirebaseAppCheckError);
        expect(appCheckError).to.have.property('code', 'app-check/internal-error');
        expect(appCheckError).to.have.property('message',
          'Error returned from server: null.');
      });
    });
  });
});
import test from 'japa'
import { DateTime } from 'luxon'
import { SchemaRef, ParsedRule, DurationUnits } from '@ioc:Adonis/Core/Validator'
import { rules } from '../../src/Rules'
import { schema } from '../../src/Schema'
import { validate } from '../fixtures/rules/index'
import { MessagesBag } from '../../src/MessagesBag'
import { ApiErrorReporter } from '../../src/ErrorReporter'
import { before } from '../../src/Validations/date/before'
/**
 * Builds a compiled "before" rule for the three supported call shapes:
 * a keyword ('today' | 'yesterday'), a schema ref to a DateTime, or a numeric
 * offset with a duration unit. Each runtime branch is kept separate so the
 * matching `rules.before` overload is selected after narrowing.
 */
function compile(keyword: 'today' | 'yesterday'): ParsedRule<any>
// eslint-disable-next-line no-redeclare
function compile(date: SchemaRef<DateTime>): ParsedRule<any>
// eslint-disable-next-line no-redeclare
function compile(interval: number, duration: DurationUnits): ParsedRule<any>
// eslint-disable-next-line no-redeclare
function compile(
  interval: number | SchemaRef<DateTime> | 'today' | 'yesterday',
  duration?: DurationUnits
): ParsedRule<any> {
  if (typeof interval === 'number') {
    return before.compile('literal', 'date', rules.before(interval, duration!).options, {})
  }
  if (typeof interval === 'string') {
    return before.compile('literal', 'date', rules.before(interval).options, {})
  }
  return before.compile('literal', 'date', rules.before(interval).options, {})
}
test.group('Date | Before', () => {
  validate(before, test, DateTime.local(), DateTime.local().minus({ days: 2 }), compile(1, 'day'))

  test('do not compile when one argument is passed and is not a ref', (assert) => {
    assert.throw(
      () => before.compile('literal', 'date', ['foo']),
      '"before": expects a date offset "duration" and "unit" or a "ref"'
    )
  })

  test('do not compile when interval is not a number', (assert) => {
    assert.throw(
      () => before.compile('literal', 'date', ['foo', 'days']),
      '"before": expects "duration" to be a number'
    )
  })

  test('do not compile when interval no arguments are defined', (assert) => {
    assert.throw(
      () => before.compile('literal', 'date', []),
      '"before": expects a date offset "duration" and "unit" or a "ref"'
    )
  })
})
test.group('Date | Before | Day', () => {
test('report error when date is not before defined interval', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().toISODate()
before.validate(DateTime.fromISO(publishedOn!), compile(1, 'day').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_on')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
/**
* The time should have no relevance in case of `days` offset
*/
test('report error when datetime is not before defined interval', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().minus({ days: 1 }).toISO()
before.validate(DateTime.fromISO(publishedOn!), compile(1, 'day').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_on')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
test('work fine when date is before defined interval', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().minus({ days: 2 }).toISO()
before.validate(DateTime.fromISO(publishedOn!), compile(1, 'day').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('report error when date is not before today', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().toISODate()
before.validate(DateTime.fromISO(publishedOn!), compile('today').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_on')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
test('work fine when date is before today', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().minus({ days: 1 }).toISODate()
before.validate(DateTime.fromISO(publishedOn!), compile('today').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('report error when date is not yesterday today', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().minus({ days: 1 }).toISODate()
before.validate(DateTime.fromISO(publishedOn!), compile('yesterday').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_on')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
test('work fine when date is before yesterday', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedOn = DateTime.local().minus({ days: 2 }).toISODate()
before.validate(DateTime.fromISO(publishedOn!), compile('yesterday').compiledOptions!, {
errorReporter: reporter,
field: 'published_on',
pointer: 'published_on',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
})
test.group('Date | Before | Minutes', () => {
test('work fine when time is not defined for the same day', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().toISODate()
before.validate(DateTime.fromISO(publishedAt!), compile(30, 'minutes').compiledOptions!, {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('report error when time is not before the defined interval', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().toISO()
before.validate(DateTime.fromISO(publishedAt!), compile(30, 'minutes').compiledOptions!, {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_at')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
test('work fine when time is before the defined interval', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().minus({ minutes: 40 }).toISO()
before.validate(DateTime.fromISO(publishedAt!), compile(30, 'minutes').compiledOptions!, {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('work fine when time is not defined for yesterday', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().minus({ days: 1 }).toISODate()
before.validate(DateTime.fromISO(publishedAt!), compile(30, 'minutes').compiledOptions!, {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: {},
mutate: () => {},
})
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
})
test.group('Date | Before | Ref', () => {
test('report error when date is not before the defined ref', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().toISODate()
const validator = {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: schema.refs({
beforeDate: DateTime.local().minus({ days: 10 }),
}),
mutate: () => {},
}
before.validate(
DateTime.fromISO(publishedAt!),
compile(validator.refs.beforeDate).compiledOptions!,
validator
)
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_at')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
test('report error when datetime is not before the defined ref', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().toISO()
const validator = {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: schema.refs({
beforeDate: DateTime.local().minus({ minutes: 30 }),
}),
mutate: () => {},
}
before.validate(
DateTime.fromISO(publishedAt!),
compile(validator.refs.beforeDate).compiledOptions!,
validator
)
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 1)
assert.equal(errors.errors[0].field, 'published_at')
assert.equal(errors.errors[0].rule, 'before')
assert.equal(errors.errors[0].message, 'before date validation failed')
})
test('work fine when time is not defined for the same day', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().minus({ minutes: 5 }).toISODate()
const validator = {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: schema.refs({
beforeDate: DateTime.local().minus({ minutes: 10 }),
}),
mutate: () => {},
}
before.validate(
DateTime.fromISO(publishedAt!),
compile(validator.refs.beforeDate).compiledOptions!,
validator
)
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('work fine when date is before the defined ref', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().minus({ days: 11 }).toISODate()
const validator = {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: schema.refs({
beforeDate: DateTime.local().minus({ days: 10 }),
}),
mutate: () => {},
}
before.validate(
DateTime.fromISO(publishedAt!),
compile(validator.refs.beforeDate).compiledOptions!,
validator
)
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('work fine when datetime is before the defined ref', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().minus({ minutes: 30 }).toISO()
const validator = {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: schema.refs({
beforeDate: DateTime.local().minus({ minutes: 10 }),
}),
mutate: () => {},
}
before.validate(
DateTime.fromISO(publishedAt!),
compile(validator.refs.beforeDate).compiledOptions!,
validator
)
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
test('work fine when time is not defined for the previous day', (assert) => {
const reporter = new ApiErrorReporter(new MessagesBag({}), false)
const publishedAt = DateTime.local().minus({ days: 1 }).toISODate()
const validator = {
errorReporter: reporter,
field: 'published_at',
pointer: 'published_at',
tip: {},
root: {},
refs: schema.refs({
beforeDate: DateTime.local().minus({ minutes: 10 }),
}),
mutate: () => {},
}
before.validate(
DateTime.fromISO(publishedAt!),
compile(validator.refs.beforeDate).compiledOptions!,
validator
)
const errors = reporter.toJSON()
assert.lengthOf(errors.errors, 0)
})
}) | the_stack |
import { IBuildingQueryEventArgs, INoResultsEventArgs, QueryEvents } from '../../src/events/QueryEvents';
import { Defer } from '../../src/misc/Defer';
import { analyticsActionCauseList } from '../../src/ui/Analytics/AnalyticsActionListMeta';
import { QueryBuilder } from '../../src/ui/Base/QueryBuilder';
import { IPagerOptions, Pager } from '../../src/ui/Pager/Pager';
import { $$ } from '../../src/utils/Dom';
import { registerCustomMatcher } from '../CustomMatchers';
import { FakeResults } from '../Fake';
import * as Mock from '../MockEnvironment';
import { Simulate } from '../Simulate';
import { l } from '../../src/strings/Strings';
import { find } from 'underscore';
// Jasmine test suite for the Pager component: page navigation, state-model
// synchronization, rendering boundaries, accessibility attributes, analytics
// events, and the component's exposed options.
export function PagerTest() {
  describe('Pager', () => {
    let test: Mock.IBasicComponentSetup<Pager>;

    // Simulates a query returning `pageCount` pages of 10 results each,
    // positioned on `currentPage`.
    function simulatePageCount(pageCount: number, currentPage = 1) {
      const builder = new QueryBuilder();
      builder.firstResult = (currentPage - 1) * 10;
      Simulate.query(test.env, {
        query: builder.build(),
        results: FakeResults.createFakeResults(pageCount * 10)
      });
    }

    // Collects the text of every rendered page button, in display order.
    function getRenderedButtonLabels() {
      return $$(test.cmp.element)
        .findAll('a.coveo-pager-list-item-text')
        .map(item => item.innerText);
    }

    beforeEach(() => {
      registerCustomMatcher();
      test = Mock.basicComponentSetup<Pager>(Pager);
    });

    afterEach(() => {
      test = null;
    });

    it('should set the correct result number when changing page', () => {
      let currentPage = 1;
      $$(test.env.root).on('buildingQuery', (e, args: IBuildingQueryEventArgs) => {
        expect(args.queryBuilder.build().firstResult).toBe(currentPage * 10);
      });
      test.cmp.setPage(++currentPage);
      test.cmp.setPage(++currentPage);
      currentPage--;
      test.cmp.previousPage();
      currentPage++;
      test.cmp.nextPage();
      expect(test.env.queryController.executeQuery).toHaveBeenCalledTimes(4);
    });

    // Non-integer, non-numeric and falsy inputs must leave (or reset) the
    // current page at 1; only clean integers are accepted.
    it('should not be possible to set current page to an invalid value', () => {
      test.cmp.setPage('a' as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage('1' as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage(2 as any);
      expect(test.cmp.currentPage).toBe(2);
      test.cmp.setPage(1.7 as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage(1.5 as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage(1.499999 as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage('1.599999' as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage('2.00000' as any);
      expect(test.cmp.currentPage).toBe(2);
      test.cmp.setPage({} as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage(true as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage(false as any);
      expect(test.cmp.currentPage).toBe(1);
      test.cmp.setPage(0 as any);
      expect(test.cmp.currentPage).toBe(1);
    });

    it('should update the state when changing page', () => {
      let currentPage = 1;
      test.cmp.setPage(++currentPage);
      expect(test.env.queryStateModel.set).toHaveBeenCalledWith('first', (currentPage - 1) * 10);
      test.cmp.setPage(++currentPage);
      expect(test.env.queryStateModel.set).toHaveBeenCalledWith('first', (currentPage - 1) * 10);
      currentPage--;
      test.cmp.previousPage();
      expect(test.env.queryStateModel.set).toHaveBeenCalledWith('first', (currentPage - 1) * 10);
      currentPage++;
      test.cmp.nextPage();
      expect(test.env.queryStateModel.set).toHaveBeenCalledWith('first', (currentPage - 1) * 10);
    });

    describe('when query state model is updated', () => {
      beforeEach(() => {
        // A live query state model is needed so state changes propagate back
        // to the pager (the default setup stubs the model).
        test = Mock.advancedComponentSetup<Pager>(
          Pager,
          new Mock.AdvancedComponentSetupOptions(undefined, undefined, env => {
            return env.withLiveQueryStateModel();
          })
        );
      });

      it('should update current page when first result is changed', () => {
        test.env.queryStateModel.set('first', 30);
        expect(test.cmp.currentPage).toBe(4);
      });

      it('should update current page when number of results per page is changed', () => {
        test.env.queryStateModel.set('numberOfResults', 10);
        test.env.queryStateModel.set('first', 49);
        expect(test.cmp.currentPage).toBe(5);
        test.env.queryStateModel.set('numberOfResults', 50);
        expect(test.cmp.currentPage).toBe(1);
      });
    });

    it('should not render anything if only one page of result is returned', () => {
      Simulate.query(test.env, { results: FakeResults.createFakeResults(5) });
      expect(test.cmp.element.querySelectorAll('li').length).toBe(0);
    });

    it('should render the pager boundary correctly', () => {
      simulatePageCount(100, 8);
      const anchors = $$(test.cmp.element).findAll('a.coveo-pager-list-item-text');
      expect($$(anchors[0]).text()).toBe('6');
      expect(anchors[0].parentElement.getAttribute('tabindex')).toBe('0');
      expect($$(anchors[anchors.length - 1]).text()).toBe('10');
    });

    it('should always respect an uneven numberOfPages when enough pages exist', () => {
      test.cmp.options.numberOfPages = 5;
      simulatePageCount(7);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3', '4', '5']);
      simulatePageCount(7, 4);
      expect(getRenderedButtonLabels()).toEqual(['2', '3', '4', '5', '6']);
      simulatePageCount(7, 7);
      expect(getRenderedButtonLabels()).toEqual(['3', '4', '5', '6', '7']);
    });

    it('should always respect an even numberOfPages when enough pages exist', () => {
      test.cmp.options.numberOfPages = 4;
      simulatePageCount(7);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3', '4']);
      simulatePageCount(7, 4);
      expect(getRenderedButtonLabels()).toEqual(['2', '3', '4', '5']);
      simulatePageCount(7, 7);
      expect(getRenderedButtonLabels()).toEqual(['4', '5', '6', '7']);
    });

    it('should render buttons for every page when the amount is lower than numberOfPages', () => {
      test.cmp.options.numberOfPages = 5;
      simulatePageCount(3);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3']);
      simulatePageCount(3, 2);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3']);
      simulatePageCount(3, 3);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3']);
    });

    it('should render buttons for every page when the amount is equal to numberOfPages', () => {
      test.cmp.options.numberOfPages = 5;
      simulatePageCount(5);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3', '4', '5']);
      simulatePageCount(5, 3);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3', '4', '5']);
      simulatePageCount(5, 5);
      expect(getRenderedButtonLabels()).toEqual(['1', '2', '3', '4', '5']);
    });

    describe('with 100 fake results', () => {
      let listItems: HTMLElement[];

      beforeEach(() => {
        const builder = new QueryBuilder();
        builder.firstResult = 50;
        Simulate.query(test.env, {
          query: builder.build(),
          results: FakeResults.createFakeResults(100)
        });
        listItems = $$(test.cmp.element).findAll('.coveo-pager-list-item');
      });

      it('should set the aria-label on the navigation element', () => {
        expect(test.cmp['list'].getAttribute('aria-label')).toEqual(l('Pagination'));
      });

      it('should set the role on the navigation element', () => {
        expect(test.cmp['list'].getAttribute('role')).toEqual('navigation');
      });

      it('should set the aria-label on elements correctly', () => {
        listItems.forEach((listItem, index) => {
          // First and last list items are skipped (presumably the
          // previous/next arrows — TODO confirm against the component markup).
          if (index !== 0 && index !== listItems.length - 1) {
            const pageNumber = parseInt($$(listItem).text());
            expect(listItem.getAttribute('aria-label')).toEqual(l('PageNumber', pageNumber.toString()));
          }
        });
      });

      it('should set the role on elements', () => {
        listItems.forEach(listItem => expect(listItem.getAttribute('role')).toEqual('button'));
      });

      it('should not make the next arrow a toggle', () => {
        expect(test.cmp.element.querySelector('.coveo-pager-next').getAttribute('aria-pressed')).toBeNull();
      });

      it('should not make the previous arrow a toggle', () => {
        expect(test.cmp.element.querySelector('.coveo-pager-previous').getAttribute('aria-pressed')).toBeNull();
      });

      it('should set aria-pressed to true on the active page element', () => {
        const activeElement = find(listItems, listItem => $$(listItem).text() === test.cmp.currentPage.toString());
        expect(activeElement.getAttribute('aria-pressed')).toEqual(true.toString());
      });

      it('should set aria-pressed to false on every inactive page element', () => {
        listItems.forEach((listItem, index) => {
          if (index !== 0 && index !== listItems.length - 1) {
            if ($$(listItem).text() !== test.cmp.currentPage.toString()) {
              expect(listItem.getAttribute('aria-pressed')).toEqual(false.toString());
            }
          }
        });
      });

      it('should set tabindex to -1 on every link element', () => {
        listItems.forEach(listItem => expect(listItem.children.item(0).getAttribute('tabindex')).toEqual('-1'));
      });

      it('should set aria-hidden to true on every link element', () => {
        listItems.forEach(listItem => expect(listItem.children.item(0).getAttribute('aria-hidden')).toEqual('true'));
      });
    });

    it('should not reset page number on a new query if the origin is a pager', () => {
      test.cmp.setPage(6);
      expect(test.cmp.currentPage).toBe(6);
      $$(test.env.root).trigger(QueryEvents.newQuery, {
        origin: test.cmp
      });
      expect(test.cmp.currentPage).toBe(6);
    });

    it('should not reset page number on a new query if the origin is a debug panel', () => {
      test.cmp.setPage(10);
      expect(test.cmp.currentPage).toBe(10);
      $$(test.env.root).trigger(QueryEvents.newQuery, {
        origin: { type: 'Debug' }
      });
      expect(test.cmp.currentPage).toBe(10);
    });

    it('should reset the page number on a new query if the origin is not set', () => {
      test.cmp.setPage(5);
      expect(test.cmp.currentPage).toBe(5);
      $$(test.env.root).trigger(QueryEvents.newQuery, {});
      expect(test.cmp.currentPage).toBe(1);
    });

    it('should reset the page number on a new query if the origin is something not recognized', () => {
      test.cmp.setPage(10);
      expect(test.cmp.currentPage).toBe(10);
      $$(test.env.root).trigger(QueryEvents.newQuery, {
        origin: 'nope not the pager'
      });
      expect(test.cmp.currentPage).toBe(1);
    });

    describe('when queries are performed', () => {
      // Simulates a query with the given page size/offset and total result
      // count; returns the setup and simulation for further assertions.
      const execQuery = (
        test: Mock.IBasicComponentSetup<Pager>,
        resultsPerPage: number,
        firstResult: number,
        numberOfResults: number,
        origin?
      ) => {
        test.env.searchInterface.resultsPerPage = resultsPerPage;
        const queryBuilder = new QueryBuilder();
        queryBuilder.numberOfResults = resultsPerPage;
        queryBuilder.firstResult = firstResult;
        const simulation = Simulate.query(test.env, {
          query: queryBuilder.build(),
          queryBuilder,
          results: FakeResults.createFakeResults(numberOfResults),
          origin
        });
        return {
          test,
          simulation
        };
      };

      it('should adapt itself to the number of results on each new query', () => {
        // 10 results per page : show full pager
        // Page 1 to 5
        execQuery(test, 10, 0, 1000);
        let anchors = $$(test.cmp.element).findAll('a.coveo-pager-list-item-text');
        expect($$(anchors[0]).text()).toBe('1');
        expect($$(anchors[anchors.length - 1]).text()).toBe('5');
        // 500 results per page : only 2 page available
        // Page 1 to 2
        execQuery(test, 500, 0, 1000);
        anchors = $$(test.cmp.element).findAll('a.coveo-pager-list-item-text');
        expect($$(anchors[0]).text()).toBe('1');
        expect($$(anchors[anchors.length - 1]).text()).toBe('2');
      });

      it('should return to the last valid page when there is no results and the numberOfResults per page is no standard', () => {
        $$(test.env.root).on(QueryEvents.noResults, (e, args: INoResultsEventArgs) => {
          expect(args.retryTheQuery).toBe(true);
        });
        test.cmp.currentPage = 11;
        execQuery(test, 100, 1000, 0, test.cmp);
        expect(test.cmp.currentPage).toBe(10);
      });

      it('should return to the last valid page when there are less results than expected', done => {
        const { simulation } = execQuery(test, 10, 30, 0, test.cmp);
        simulation.results.totalCountFiltered = 29;
        simulation.results.totalCount = 29;
        Simulate.query(test.env, {
          query: simulation.query,
          queryBuilder: simulation.queryBuilder,
          results: simulation.results,
          origin: test.cmp
        });
        Defer.defer(() => {
          // started at page 4
          // expected to receive more than 30 results in total but received only 29
          // Should go back to last valid page, which is page 3
          expect(test.cmp.currentPage).toBe(3);
          done();
        });
      });

      it(`when having a resultPerPage that is not a divider of maximumNumberOfResultsFromIndex
      should prevent requesting more result than the maximumNumberOfResultsFromIndex option`, () => {
        const resultsPerPage = 12;
        const pageNumber = Math.ceil(test.cmp.options.maximumNumberOfResultsFromIndex / resultsPerPage);
        const firstResult = (pageNumber - 1) * resultsPerPage;
        test.cmp.setPage(pageNumber);
        const { simulation } = execQuery(test, resultsPerPage, firstResult, 0, test.cmp);
        expect(simulation.queryBuilder.numberOfResults).toBe(test.cmp.options.maximumNumberOfResultsFromIndex - firstResult);
      });
    });

    describe('analytics', () => {
      it('should log the proper event when selecting a page directly', () => {
        test.cmp.setPage(15);
        expect(test.env.usageAnalytics.logCustomEvent).toHaveBeenCalledWith(
          analyticsActionCauseList.pagerNumber,
          { pagerNumber: 15 },
          test.cmp.element
        );
      });

      it('should log the proper event when hitting next page', () => {
        test.cmp.nextPage();
        expect(test.env.usageAnalytics.logCustomEvent).toHaveBeenCalledWith(
          analyticsActionCauseList.pagerNext,
          { pagerNumber: 2 },
          test.cmp.element
        );
      });

      it('should log the proper event when hitting previous page', () => {
        test.cmp.setPage(3);
        test.cmp.previousPage();
        expect(test.env.usageAnalytics.logCustomEvent).toHaveBeenCalledWith(
          analyticsActionCauseList.pagerPrevious,
          { pagerNumber: 2 },
          test.cmp.element
        );
      });
    });

    describe('exposes options', () => {
      it('numberOfPages allow to specify the number of pages to render', () => {
        test = Mock.optionsComponentSetup<Pager, IPagerOptions>(Pager, <IPagerOptions>{
          numberOfPages: 22
        });
        Simulate.query(test.env, {
          results: FakeResults.createFakeResults(1000)
        });
        expect($$(test.cmp.element).findAll('a.coveo-pager-list-item-text').length).toBe(22);
      });

      it('enableNavigationButton can enable or disable nav buttons', () => {
        test = Mock.optionsComponentSetup<Pager, IPagerOptions>(Pager, <IPagerOptions>{
          enableNavigationButton: true
        });
        const builder = new QueryBuilder();
        builder.firstResult = 70;
        Simulate.query(test.env, {
          query: builder.build(),
          results: FakeResults.createFakeResults(1000)
        });
        expect($$(test.cmp.element).findAll('.coveo-pager-previous').length).toBe(1);
        expect($$(test.cmp.element).findAll('.coveo-pager-next').length).toBe(1);
        test = Mock.optionsComponentSetup<Pager, IPagerOptions>(Pager, <IPagerOptions>{
          enableNavigationButton: false
        });
        Simulate.query(test.env, {
          query: builder.build(),
          results: FakeResults.createFakeResults(1000)
        });
        expect($$(test.cmp.element).findAll('.coveo-pager-previous').length).toBe(0);
        expect($$(test.cmp.element).findAll('.coveo-pager-next').length).toBe(0);
      });

      it('maximumNumberOfResultsFromIndex allow to specify the maximum last possible result from the index', () => {
        test = Mock.optionsComponentSetup<Pager, IPagerOptions>(Pager, <IPagerOptions>{
          maximumNumberOfResultsFromIndex: 31
        });
        const builder = new QueryBuilder();
        builder.firstResult = 30;
        Simulate.query(test.env, {
          query: builder.build(),
          results: FakeResults.createFakeResults(1000) // return much more results than 31, but the option should still work properly
        });
        const anchors = $$(test.cmp.element).findAll('a.coveo-pager-list-item-text');
        // 31 results max from the index
        // divided by 10 results per page
        // means 4 pages
        expect($$(anchors[anchors.length - 1]).text()).toBe('4');
      });
    });
  });
}
import { List } from 'immutable';
import { Class, Instance, isInstanceOf } from 'immutable-class';
import { Timezone, Duration } from 'chronoshift';
import { $, r, Expression, LiteralExpression, ExpressionJS, InAction, Set, Range, TimeRange } from 'swiv-plywood';
import { immutableListsEqual } from '../../utils/general/general';
import { Dimension } from '../dimension/dimension';
import { FilterClause, FilterClauseJS, FilterSelection } from '../filter-clause/filter-clause';
// Removes every clause equal to `clause`, except the occurrence at
// `allowIndex`, which is always kept. Used to de-duplicate the list after an
// insert (see Filter.insertByIndex).
function withholdClause(clauses: List<FilterClause>, clause: FilterClause, allowIndex: number): List<FilterClause> {
  return <List<FilterClause>>clauses.filter((c, i) => {
    return i === allowIndex || !c.equals(clause);
  });
}
// Replaces every clause equal to `clause` with `other`, except the occurrence
// at `allowIndex`, which is left untouched. Together with withholdClause this
// preserves clause uniqueness when a clause is replaced within the list
// (see Filter.replaceByIndex).
function swapClause(clauses: List<FilterClause>, clause: FilterClause, other: FilterClause, allowIndex: number): List<FilterClause> {
  return <List<FilterClause>>clauses.map((c, i) => {
    return (i === allowIndex || !c.equals(clause)) ? c : other;
  });
}
/**
 * Formats a date as a filename-friendly string: the UTC ISO timestamp with
 * 'T' swapped for '_', and the trailing 'Z' and zero-millisecond suffix
 * ('.000') stripped, e.g. "2020-01-02_03:04:05".
 */
function dateToFileString(date: Date): string {
  const iso = date.toISOString();
  return iso.replace('T', '_').replace('Z', '').replace('.000', '');
}
export type FilterMode = 'exclude' | 'include' | 'regex' | 'contains';
export type FilterValue = List<FilterClause>;
export type FilterJS = ExpressionJS | string;
var check: Class<FilterValue, FilterJS>;
export class Filter implements Instance<FilterValue, FilterJS> {
static EMPTY: Filter;
static EXCLUDED: FilterMode = 'exclude';
static INCLUDED: FilterMode = 'include';
static REGEX: FilterMode = 'regex';
static CONTAINS: FilterMode = 'contains';
  // Type guard: true when `candidate` is a Filter instance.
  static isFilter(candidate: any): candidate is Filter {
    return isInstanceOf(candidate, Filter);
  }
  // Builds a single-clause filter; throws when no clause is given.
  static fromClause(clause: FilterClause): Filter {
    if (!clause) throw new Error('must have clause');
    return new Filter(List([clause]));
  }
static fromJS(parameters: FilterJS): Filter {
var expression = Expression.fromJSLoose(parameters);
var clauses: FilterClause[] = null;
if (expression.equals(Expression.TRUE)) {
clauses = [];
} else {
clauses = (expression.getExpressionPattern('and') || [expression]).map(c => FilterClause.fromExpression(c));
}
return new Filter(<List<FilterClause>>List(clauses));
}
public clauses: List<FilterClause>;
  // Wraps the given clause list; the list is never mutated afterwards —
  // every modifying method below returns a new Filter.
  constructor(parameters: FilterValue) {
    this.clauses = parameters;
  }
  // Returns the raw clause list (immutable-class Instance contract).
  public valueOf(): FilterValue {
    return this.clauses;
  }
  // Serializes by converting to the equivalent plywood expression first.
  public toJS(): FilterJS {
    return this.toExpression().toJS();
  }
  // JSON.stringify hook; identical output to toJS().
  public toJSON(): FilterJS {
    return this.toJS();
  }
  // Human-readable rendering: the clauses joined with ' and '.
  public toString() {
    return this.clauses.map(clause => clause.toString()).join(' and ');
  }
  // Structural equality: other must be a Filter with an element-wise equal
  // clause list.
  public equals(other: Filter): boolean {
    return Filter.isFilter(other) &&
      immutableListsEqual(this.clauses, other.clauses);
  }
  /**
   * Replaces the clause at `index` with `replace`; when `index` equals the
   * list size this is an append (delegates to insertByIndex). If `replace`
   * already occurred elsewhere in the list, that other occurrence is swapped
   * to the displaced clause, so the operation behaves like a swap rather than
   * producing a duplicate.
   */
  public replaceByIndex(index: number, replace: FilterClause): Filter {
    var { clauses } = this;
    if (clauses.size === index) return this.insertByIndex(index, replace);
    var replacedClause = clauses.get(index);
    clauses = <List<FilterClause>>clauses.map((c, i) => i === index ? replace : c);
    clauses = swapClause(clauses, replace, replacedClause, index);
    return new Filter(clauses);
  }
  /**
   * Inserts `insert` at `index`; any other occurrence of an equal clause is
   * withheld (removed) so the clause appears only once — effectively a move
   * when the clause was already present.
   */
  public insertByIndex(index: number, insert: FilterClause): Filter {
    var { clauses } = this;
    clauses = <List<FilterClause>>clauses.splice(index, 0, insert);
    clauses = withholdClause(clauses, insert, index);
    return new Filter(clauses);
  }
  // True when the filter has no clauses.
  public empty(): boolean {
    return this.clauses.size === 0;
  }
  // True when exactly one clause is present.
  public single(): boolean {
    return this.clauses.size === 1;
  }
  // Number of clauses in the filter.
  public length(): number {
    return this.clauses.size;
  }
public toExpression(): Expression {
var clauses = this.clauses.toArray().map(clause => {
return clause.toExpression();
});
switch (clauses.length) {
case 0:
return Expression.TRUE;
case 1:
return clauses[0];
default:
return Expression.and(clauses);
}
}
  // Same result as empty(), delegating to Immutable's isEmpty().
  public isEmpty(): boolean {
    return this.clauses.isEmpty();
  }
  // True when any clause is relative (needs resolving against now/maxTime —
  // see getSpecificFilter).
  public isRelative(): boolean {
    return this.clauses.some(clause => clause.relative);
  }
/**
 * Resolves any relative clauses against concrete times, returning a filter
 * with only specific clauses. Returns this unchanged when already specific.
 */
public getSpecificFilter(now: Date, maxTime: Date, timezone: Timezone): Filter {
  if (!this.isRelative()) return this;
  const evaluated = this.clauses.map(clause => clause.evaluate(now, maxTime, timezone));
  return new Filter(evaluated as List<FilterClause>);
}
/**
 * Index of the first clause whose expression equals `attribute`, or -1.
 */
private indexOfClause(attribute: Expression): number {
  const matchesAttribute = (clause: FilterClause) => clause.expression.equals(attribute);
  return this.clauses.findIndex(matchesAttribute);
}
/**
 * The first clause whose expression equals `attribute`, or undefined.
 */
public clauseForExpression(attribute: Expression): FilterClause {
  const matchesAttribute = (clause: FilterClause) => clause.expression.equals(attribute);
  return this.clauses.find(matchesAttribute);
}
/**
 * TRUE when a clause exists for the given attribute expression.
 */
public filteredOn(attribute: Expression): boolean {
  return this.indexOfClause(attribute) > -1;
}
/**
 * TRUE when the clause for `attribute` exists and its literal set
 * contains `value`.
 */
public filteredOnValue(attribute: Expression, value: any): boolean {
  const index = this.indexOfClause(attribute);
  if (index === -1) return false;
  return this.clauses.get(index).getLiteralSet().contains(value);
}
/**
 * Adds `value` to the clause for `attribute`, creating a new single-value
 * clause when no clause exists for that attribute yet.
 */
public addValue(attribute: Expression, value: any): Filter {
  const { clauses } = this;
  const index = this.indexOfClause(attribute);
  if (index === -1) {
    // No clause on this attribute yet: append a fresh one-value clause.
    const newClause = new FilterClause({
      expression: attribute,
      selection: r(Set.fromJS([value]))
    });
    return new Filter(clauses.concat(newClause) as List<FilterClause>);
  }
  const clause = clauses.get(index);
  const grownSet = clause.getLiteralSet().add(value);
  return new Filter(clauses.splice(index, 1, clause.changeSelection(r(grownSet))) as List<FilterClause>);
}
/**
 * Removes the clause for `attribute`; returns this when none exists.
 */
public remove(attribute: Expression): Filter {
  const index = this.indexOfClause(attribute);
  return index === -1 ? this : new Filter(this.clauses.delete(index));
}
/**
 * Removes `value` from the clause for `attribute`; drops the clause
 * entirely when its literal set becomes empty.
 */
public removeValue(attribute: Expression, value: any): Filter {
  const index = this.indexOfClause(attribute);
  if (index === -1) return this;
  const { clauses } = this;
  const clause = clauses.get(index);
  const shrunkSet = clause.getLiteralSet().remove(value);
  if (shrunkSet.empty()) {
    // Last value removed: the whole clause goes away.
    return new Filter(clauses.delete(index));
  }
  const updated = clauses.splice(index, 1, clause.changeSelection(r(shrunkSet))) as List<FilterClause>;
  return new Filter(updated);
}
/**
 * Removes `value` from the clause when present, otherwise adds it.
 */
public toggleValue(attribute: Expression, value: any): Filter {
  if (this.filteredOnValue(attribute, value)) {
    return this.removeValue(attribute, value);
  }
  return this.addValue(attribute, value);
}
/**
 * The selection of the clause for `attribute`, or null when no clause exists.
 */
public getSelection(attribute: Expression): FilterSelection {
  const index = this.indexOfClause(attribute);
  if (index === -1) return null;
  return this.clauses.get(index).selection;
}
/**
 * Sets the selection for `attribute`, replacing the existing clause or
 * appending a new one when the attribute is not yet filtered.
 */
public setSelection(attribute: Expression, selection: Expression): Filter {
  const newClause = new FilterClause({
    expression: attribute,
    selection
  });
  const index = this.indexOfClause(attribute);
  const newClauses = index === -1
    ? this.clauses.push(newClause)
    : this.clauses.splice(index, 1, newClause);
  return new Filter(newClauses as List<FilterClause>);
}
/**
 * The extent (range) of the clause for `attribute`, or null when no
 * clause exists for it.
 */
public getExtent(attribute: Expression): Range<any> {
  const index = this.indexOfClause(attribute);
  if (index === -1) return null;
  return this.clauses.get(index).getExtent();
}
/**
 * Builds a file-name fragment describing this filter (used when exporting).
 * A time clause (when present) contributes a "start_end" date-range prefix;
 * the remaining clauses contribute a "_filters-N" suffix.
 *
 * Fix: the inner helper's parameter previously shadowed the outer
 * `nonTimeClauseSize` variable, inviting accidental misreads/edits.
 * @param timeAttribute The expression identifying the time dimension.
 * @returns The file name fragment.
 */
public getFileString(timeAttribute: Expression) {
  const timeRange = this.getExtent(timeAttribute); // ToDo: revisit this
  let nonTimeClauseSize = this.clauses.size;
  // Suffix describing how many non-time clauses are applied (empty for none).
  const filtersSuffix = (count: number) => count === 0 ? "" : `_filters-${count}`;
  if (timeRange) {
    var { start, end } = timeRange;
    // The time clause itself is not counted as a "filter".
    nonTimeClauseSize--;
    return `${dateToFileString(start)}_${dateToFileString(end)}${filtersSuffix(nonTimeClauseSize)}`;
  }
  return filtersSuffix(nonTimeClauseSize);
}
/**
 * The literal set of the clause for `attribute`, or null when no clause
 * exists for it.
 */
public getLiteralSet(attribute: Expression): Set {
  const index = this.indexOfClause(attribute);
  if (index === -1) return null;
  return this.clauses.get(index).getLiteralSet();
}
/**
 * All clauses whose expression equals the dimension's expression.
 */
public getClausesForDimension(dimension: Dimension): List<FilterClause> {
  const targetExpression = dimension.expression;
  return this.clauses.filter(clause => clause.expression.equals(targetExpression)) as List<FilterClause>;
}
/**
 * Derives the filter mode for a dimension from its clauses:
 * 'regex' / 'contains' / 'exclude' when ALL its clauses share that trait,
 * otherwise 'include'; undefined when the dimension has no clauses.
 */
public getModeForDimension(dimension: Dimension): FilterMode {
  const dimensionClauses = this.getClausesForDimension(dimension);
  if (dimensionClauses.size === 0) return undefined;
  if (dimensionClauses.every(clause => clause.action === 'match')) return 'regex';
  if (dimensionClauses.every(clause => clause.action === 'contains')) return 'contains';
  if (dimensionClauses.every(clause => clause.exclude)) return 'exclude';
  return 'include';
}
/**
 * Replaces every clause on the same attribute with the given clause, or
 * appends it when no clause on that attribute exists.
 */
public setClause(expression: FilterClause): Filter {
  const targetAttribute = expression.expression;
  let replaced = false;
  let newClauses = this.clauses.map((clause) => {
    if (!clause.expression.equals(targetAttribute)) return clause;
    replaced = true;
    return expression;
  }) as List<FilterClause>;
  if (!replaced) {
    newClauses = newClauses.push(expression);
  }
  return new Filter(newClauses);
}
/**
 * Applies every clause of `delta` to this filter via setClause, returning
 * the accumulated result.
 */
public applyDelta(delta: Filter): Filter {
  return delta.clauses.reduce<Filter>(
    (filter, deltaClause) => filter.setClause(deltaClause),
    this
  );
}
/**
 * The literal set of the only clause, or null when the filter does not
 * have exactly one clause.
 */
public getSingleClauseSet(): Set {
  const { clauses } = this;
  return clauses.size === 1 ? clauses.get(0).getLiteralSet() : null;
}
/**
 * Restricts the filter to clauses whose expression matches one of the given
 * dimensions. A clause on the old time attribute is re-targeted to the new
 * time attribute (keeping its selection) instead of being dropped.
 * @param dimensions The dimensions the filter may reference.
 * @param timeAttribute The current time attribute expression.
 * @param oldTimeAttribute The previous time attribute expression, if any.
 * @returns A new Filter when anything was removed or re-targeted, otherwise this.
 */
public constrainToDimensions(dimensions: List<Dimension>, timeAttribute: Expression, oldTimeAttribute: Expression = null): Filter {
var hasChanged = false;
var clauses: FilterClause[] = [];
this.clauses.forEach((clause) => {
var clauseExpression = clause.expression;
if (Dimension.getDimensionByExpression(dimensions, clauseExpression)) {
clauses.push(clause);
} else {
hasChanged = true;
// Special handling for time filter
if (timeAttribute && oldTimeAttribute && oldTimeAttribute.equals(clauseExpression)) {
clauses.push(new FilterClause({
expression: timeAttribute,
selection: clause.selection
}));
}
}
});
return hasChanged ? new Filter(List(clauses)) : this;
}
/**
 * The attribute expressions whose clause differs from (or is missing in)
 * the other filter.
 */
public getDifferentAttributes(other: Filter): Expression[] {
  const diff: Expression[] = [];
  this.clauses.forEach((clause) => {
    const attribute = clause.expression;
    if (!clause.equals(other.clauseForExpression(attribute))) {
      diff.push(attribute);
    }
  });
  return diff;
}
/**
 * Widens the time clause by one duration step on each side (over-querying so
 * neighbouring buckets are available); non-time clauses pass through
 * unchanged. Returns this unchanged when no time attribute is provided.
 * @param duration The step to extend the time range by.
 * @param timezone The timezone used for the duration arithmetic.
 * @param timeAttribute The expression identifying the time clause.
 */
public overQuery(duration: Duration, timezone: Timezone, timeAttribute: Expression): Filter {
if (!timeAttribute) return this;
return new Filter(<List<FilterClause>>this.clauses.map((clause) => {
if (clause.expression.equals(timeAttribute)) {
var timeRange: TimeRange = clause.getExtent() as TimeRange;
// Shift the start back and the end forward by one duration step.
var newTimeRange = new TimeRange({
start: duration.shift(timeRange.start, timezone, -1),
end: duration.shift(timeRange.end, timezone, 1)
});
return clause.changeSelection(r(newTimeRange));
} else {
return clause;
}
}));
}
/**
 * Sets the exclude flag on every clause belonging to the given dimension,
 * leaving other clauses untouched.
 */
public setExclusionforDimension(exclusion: boolean, dimension: Dimension): Filter {
  const updated = this.clauses.map((clause: FilterClause) => {
    return clause.expression.equals(dimension.expression)
      ? clause.changeExclude(exclusion)
      : clause;
  });
  return new Filter(updated as List<FilterClause>);
}
}
// Register the class for the immutable-class instance check.
check = Filter;
// Reusable empty filter (no clauses — equivalent to the TRUE expression).
Filter.EMPTY = new Filter(<List<FilterClause>>List());
import * as fs from 'fs';
import * as path from 'path';
import * as vscode from 'vscode';
import { getConfig } from './config';
import { DataSource } from './dataSource';
import { DEFAULT_REPO_STATE, ExtensionState } from './extensionState';
import { Logger } from './logger';
import { BooleanOverride, ErrorInfo, FileViewType, GitRepoSet, GitRepoState, PullRequestConfig, PullRequestConfigBase, PullRequestProvider, RepoCommitOrdering } from './types';
import { evalPromises, getPathFromStr, getPathFromUri, getRepoName, pathWithTrailingSlash, realpath, showErrorMessage, showInformationMessage } from './utils';
import { BufferedQueue } from './utils/bufferedQueue';
import { Disposable, toDisposable } from './utils/disposable';
import { Event, EventEmitter } from './utils/event';
/**
 * Payload emitted whenever the set of known repositories changes.
 */
export interface RepoChangeEvent {
// The current set of known repositories.
readonly repos: GitRepoSet;
// The number of repositories in `repos`.
readonly numRepos: number;
// The path of a repository to load in the Git Graph View, or NULL.
readonly loadRepo: string | null;
}
/**
 * Detects and manages repositories in Git Graph.
 */
export class RepoManager extends Disposable {
// Injected collaborators for Git queries, persisted state, and logging.
private readonly dataSource: DataSource;
private readonly extensionState: ExtensionState;
private readonly logger: Logger;
// The set of known repositories, keyed by repository root path.
private repos: GitRepoSet;
// Repository root paths the user has explicitly ignored.
private ignoredRepos: string[];
private maxDepthOfRepoSearch: number;
// One file system watcher per workspace folder, keyed by folder path.
private readonly folderWatchers: { [workspace: string]: vscode.FileSystemWatcher } = {};
private readonly configWatcher: vscode.FileSystemWatcher;
private readonly repoEventEmitter: EventEmitter<RepoChangeEvent>;
// Queues that buffer bursts of file system events before processing them.
private readonly onWatcherCreateQueue: BufferedQueue<string>;
private readonly onWatcherChangeQueue: BufferedQueue<string>;
private readonly checkRepoConfigQueue: BufferedQueue<string>;
/**
 * Creates the Git Graph Repository Manager, and runs startup tasks.
 * @param dataSource The Git Graph DataSource instance.
 * @param extensionState The Git Graph ExtensionState instance.
 * @param logger The Git Graph Logger instance.
 */
constructor(dataSource: DataSource, extensionState: ExtensionState, onDidChangeConfiguration: Event<vscode.ConfigurationChangeEvent>, logger: Logger) {
super();
this.dataSource = dataSource;
this.extensionState = extensionState;
this.logger = logger;
this.repos = extensionState.getRepos();
this.ignoredRepos = extensionState.getIgnoredRepos();
this.maxDepthOfRepoSearch = getConfig().maxDepthOfRepoSearch;
// Watch for repository configuration files created or changed anywhere in the workspace.
this.configWatcher = vscode.workspace.createFileSystemWatcher('**/.vscode/vscode-git-graph.json');
this.configWatcher.onDidCreate(this.onConfigWatcherCreateOrChange.bind(this));
this.configWatcher.onDidChange(this.onConfigWatcherCreateOrChange.bind(this));
this.repoEventEmitter = new EventEmitter<RepoChangeEvent>();
this.onWatcherCreateQueue = new BufferedQueue<string>(this.processOnWatcherCreateEvent.bind(this), this.sendRepos.bind(this));
this.onWatcherChangeQueue = new BufferedQueue<string>(this.processOnWatcherChangeEvent.bind(this), this.sendRepos.bind(this));
this.checkRepoConfigQueue = new BufferedQueue<string>(this.checkRepoForNewConfig.bind(this), this.sendRepos.bind(this));
this.startupTasks();
this.registerDisposables(
// Monitor changes to the workspace folders to:
// - search added folders for repositories
// - remove repositories within deleted folders
// - apply changes to the order of workspace folders
vscode.workspace.onDidChangeWorkspaceFolders(async (e) => {
let changes = false, path;
if (e.added.length > 0) {
for (let i = 0; i < e.added.length; i++) {
path = getPathFromUri(e.added[i].uri);
if (await this.searchDirectoryForRepos(path, this.maxDepthOfRepoSearch)) changes = true;
this.startWatchingFolder(path);
}
}
if (e.removed.length > 0) {
for (let i = 0; i < e.removed.length; i++) {
path = getPathFromUri(e.removed[i].uri);
if (this.removeReposWithinFolder(path)) changes = true;
this.stopWatchingFolder(path);
}
}
if (this.updateReposWorkspaceFolderIndex()) {
this.extensionState.saveRepos(this.repos);
changes = true;
}
if (changes) {
this.sendRepos();
}
}),
// Monitor changes to the maxDepthOfRepoSearch Extension Setting, and trigger a new search if needed
onDidChangeConfiguration((event) => {
if (event.affectsConfiguration('git-graph.maxDepthOfRepoSearch')) {
this.maxDepthOfRepoSearchChanged();
}
}),
// Dispose the Repository Event Emitter when disposed
this.repoEventEmitter,
// Dispose the configWatcher
this.configWatcher,
// Dispose the onWatcherCreateQueue
this.onWatcherCreateQueue,
// Dispose the onWatcherChangeQueue
this.onWatcherChangeQueue,
// Dispose the checkRepoConfigQueue,
this.checkRepoConfigQueue,
// Stop watching folders when disposed
toDisposable(() => {
const folders = Object.keys(this.folderWatchers);
for (let i = 0; i < folders.length; i++) {
this.stopWatchingFolder(folders[i]);
}
})
);
}
/**
 * Get the Event that can be used to subscribe to updates when the repositories available in Git Graph change.
 */
get onDidChangeRepos() {
return this.repoEventEmitter.subscribe;
}
/**
 * Apply the new value of `git-graph.maxDepthOfRepoSearch` to the RepoManager.
 */
private maxDepthOfRepoSearchChanged() {
const newDepth = getConfig().maxDepthOfRepoSearch;
if (newDepth > this.maxDepthOfRepoSearch) {
// A deeper search may discover repositories the shallower search missed.
this.maxDepthOfRepoSearch = newDepth;
this.searchWorkspaceForRepos();
} else {
this.maxDepthOfRepoSearch = newDepth;
}
}
/**
 * Run various startup tasks when Git Graph is activated.
 */
private async startupTasks() {
this.removeReposNotInWorkspace();
if (this.updateReposWorkspaceFolderIndex()) {
this.extensionState.saveRepos(this.repos);
}
if (!await this.checkReposExist()) {
// On startup, ensure that sendRepo is called (even if no changes were made)
this.sendRepos();
}
this.checkReposForNewConfig();
await this.checkReposForNewSubmodules();
await this.searchWorkspaceForRepos();
this.startWatchingFolders();
}
/**
 * Remove any repositories that are no longer in the current workspace.
 */
private removeReposNotInWorkspace() {
const workspaceFolderInfo = getWorkspaceFolderInfoForRepoInclusionMapping();
const rootsExact = workspaceFolderInfo.rootsExact, rootsFolder = workspaceFolderInfo.rootsFolder, repoPaths = Object.keys(this.repos);
for (let i = 0; i < repoPaths.length; i++) {
const repoPathFolder = pathWithTrailingSlash(repoPaths[i]);
// Keep a repo when it matches a workspace folder exactly, is inside one, or contains one.
if (rootsExact.indexOf(repoPaths[i]) === -1 && !rootsFolder.find(root => repoPaths[i].startsWith(root)) && !rootsExact.find(root => root.startsWith(repoPathFolder))) {
this.removeRepo(repoPaths[i]);
}
}
}
/**
 * Register a new repository with Git Graph.
 * @param path The path of the repository.
 * @param loadRepo If TRUE and the Git Graph View is visible, load the Git Graph View with the repository being registered.
 */
public registerRepo(path: string, loadRepo: boolean) {
// NOTE(review): async Promise executor — an exception thrown inside it would leave
// this Promise unsettled; all expected outcomes resolve with an error string instead.
return new Promise<{ root: string | null, error: string | null }>(async resolve => {
let root = await this.dataSource.repoRoot(path);
if (root === null) {
resolve({ root: null, error: 'The folder "' + path + '" is not a Git repository.' });
} else if (typeof this.repos[root] !== 'undefined') {
resolve({ root: null, error: 'The folder "' + path + '" is contained within the known repository "' + root + '".' });
} else {
if (this.ignoredRepos.includes(root)) {
// Explicit registration overrides a previous "ignore".
this.ignoredRepos.splice(this.ignoredRepos.indexOf(root), 1);
this.extensionState.setIgnoredRepos(this.ignoredRepos);
}
await this.addRepo(root);
this.sendRepos(loadRepo ? root : null);
resolve({ root: root, error: null });
}
});
}
/**
 * Ignore a repository known to Git Graph. Unlike `removeRepo`, ignoring the repository will prevent it from being automatically detected and re-added the next time Visual Studio Code is started.
 * @param repo The path of the repository.
 * @returns TRUE => Repository was ignored, FALSE => Repository is not know to Git Graph.
 */
public ignoreRepo(repo: string) {
if (this.isKnownRepo(repo)) {
if (!this.ignoredRepos.includes(repo)) this.ignoredRepos.push(repo);
this.extensionState.setIgnoredRepos(this.ignoredRepos);
this.removeRepo(repo);
this.sendRepos();
return true;
} else {
return false;
}
}
/* Repo Management */
/**
 * Get a set of all known repositories in the current workspace.
 * @returns The set of repositories.
 */
public getRepos() {
// Shallow copy so callers can't mutate the internal set.
return Object.assign({}, this.repos);
}
/**
 * Get the number of all known repositories in the current workspace.
 * @returns The number of repositories.
 */
public getNumRepos() {
return Object.keys(this.repos).length;
}
/**
 * Get the repository that contains the specified file.
 * @param path The path of the file.
 * @returns The path of the repository containing the file, or NULL if no known repository contains the file.
 */
public getRepoContainingFile(path: string) {
// Prefer the longest (most deeply nested) matching repository path.
let repoPaths = Object.keys(this.repos), repo = null;
for (let i = 0; i < repoPaths.length; i++) {
if (path.startsWith(pathWithTrailingSlash(repoPaths[i])) && (repo === null || repo.length < repoPaths[i].length)) repo = repoPaths[i];
}
return repo;
}
/**
 * Get all known repositories that are contained in the specified folder.
 * @param path The path of the folder.
 * @returns An array of the paths of all known repositories contained in the specified folder.
 */
private getReposInFolder(path: string) {
let pathFolder = pathWithTrailingSlash(path), repoPaths = Object.keys(this.repos), reposInFolder: string[] = [];
for (let i = 0; i < repoPaths.length; i++) {
if (repoPaths[i] === path || repoPaths[i].startsWith(pathFolder)) reposInFolder.push(repoPaths[i]);
}
return reposInFolder;
}
/**
 * Get the path of the known repository matching the specified repository path (checking symbolic links if necessary).
 * @param repo The path of the repository.
 * @returns The path of the known repository, or NULL if the specified repository is unknown.
 */
public async getKnownRepo(repo: string) {
if (this.isKnownRepo(repo)) {
// The path is already known as a repo
return repo;
}
// Check to see if a known repository contains a symlink that resolves the repo
let canonicalRepo = await realpath(repo);
let repoPaths = Object.keys(this.repos);
for (let i = 0; i < repoPaths.length; i++) {
if (canonicalRepo === (await realpath(repoPaths[i]))) {
return repoPaths[i];
}
}
// Repo is unknown
return null;
}
/**
 * Check to see if a repository exactly matches a known repository.
 * @param repo The path of the repository to check.
 * @returns TRUE => Known repository, FALSE => Unknown repository.
 */
public isKnownRepo(repo: string) {
return typeof this.repos[repo] !== 'undefined';
}
/**
 * Add a new repository to Git Graph.
 * @param repo The path of the repository.
 * @returns TRUE => The repository was added, FALSE => The repository is ignored and couldn't be added.
 */
private async addRepo(repo: string) {
if (this.ignoredRepos.includes(repo)) {
return false;
} else {
this.repos[repo] = Object.assign({}, DEFAULT_REPO_STATE);
this.updateReposWorkspaceFolderIndex(repo);
this.extensionState.saveRepos(this.repos);
this.logger.log('Added new repo: ' + repo);
await this.checkRepoForNewConfig(repo, true);
await this.searchRepoForSubmodules(repo);
return true;
}
}
/**
 * Remove a known repository from Git Graph.
 * @param repo The path of the repository.
 */
private removeRepo(repo: string) {
delete this.repos[repo];
this.extensionState.saveRepos(this.repos);
this.logger.log('Removed repo: ' + repo);
}
/**
 * Remove all repositories that are contained within the specified folder.
 * @param path The path of the folder.
 * @returns TRUE => At least one repository was removed, FALSE => No repositories were removed.
 */
private removeReposWithinFolder(path: string) {
let reposInFolder = this.getReposInFolder(path);
for (let i = 0; i < reposInFolder.length; i++) {
this.removeRepo(reposInFolder[i]);
}
return reposInFolder.length > 0;
}
/**
 * Checks if the specified path is within a known repository.
 * @param path The path to check.
 * @returns TRUE => Path is within a known repository, FALSE => Path isn't within a known repository.
 */
private isDirectoryWithinRepos(path: string) {
let repoPaths = Object.keys(this.repos);
for (let i = 0; i < repoPaths.length; i++) {
if (path === repoPaths[i] || path.startsWith(pathWithTrailingSlash(repoPaths[i]))) return true;
}
return false;
}
/**
 * Send the latest set of known repositories to subscribers as they have changed.
 * @param loadRepo The optional path of a repository to load in the Git Graph View.
 */
private sendRepos(loadRepo: string | null = null) {
this.repoEventEmitter.emit({
repos: this.getRepos(),
numRepos: this.getNumRepos(),
loadRepo: loadRepo
});
}
/**
 * Check that all known repositories still exist. If they don't, remove them.
 * @returns TRUE => At least one repository was removed or transferred, FALSE => No repositories were removed.
 */
public checkReposExist() {
let repoPaths = Object.keys(this.repos), changes = false;
return evalPromises(repoPaths, 3, (path) => this.dataSource.repoRoot(path)).then((results) => {
for (let i = 0; i < repoPaths.length; i++) {
if (results[i] === null) {
this.removeRepo(repoPaths[i]);
changes = true;
} else if (repoPaths[i] !== results[i]) {
// The resolved root differs from the stored path: migrate the state.
this.transferRepoState(repoPaths[i], results[i]!);
changes = true;
}
}
}).catch(() => { }).then(() => {
if (changes) {
this.sendRepos();
}
return changes;
});
}
/**
 * Update each repositories workspaceFolderIndex based on the current workspace.
 * @param repo If provided, only update this specific repository.
 * @returns TRUE => At least one repository was changed, FALSE => No repositories were changed.
 */
private updateReposWorkspaceFolderIndex(repo: string | null = null) {
const workspaceFolderInfo = getWorkspaceFolderInfoForRepoInclusionMapping();
const rootsExact = workspaceFolderInfo.rootsExact, rootsFolder = workspaceFolderInfo.rootsFolder, workspaceFolders = workspaceFolderInfo.workspaceFolders;
const repoPaths = repo !== null && this.isKnownRepo(repo) ? [repo] : Object.keys(this.repos);
let changes = false, rootIndex: number, workspaceFolderIndex: number | null;
for (let i = 0; i < repoPaths.length; i++) {
rootIndex = rootsExact.indexOf(repoPaths[i]);
if (rootIndex === -1) {
// Find a workspace folder that contains the repository
rootIndex = rootsFolder.findIndex((root) => repoPaths[i].startsWith(root));
}
if (rootIndex === -1) {
// Find a workspace folder that is contained within the repository
const repoPathFolder = pathWithTrailingSlash(repoPaths[i]);
rootIndex = rootsExact.findIndex((root) => root.startsWith(repoPathFolder));
}
workspaceFolderIndex = rootIndex > -1 ? workspaceFolders[rootIndex].index : null;
if (this.repos[repoPaths[i]].workspaceFolderIndex !== workspaceFolderIndex) {
this.repos[repoPaths[i]].workspaceFolderIndex = workspaceFolderIndex;
changes = true;
}
}
return changes;
}
/**
 * Set the state of a known repository.
 * @param repo The repository the state belongs to.
 * @param state The state.
 */
public setRepoState(repo: string, state: GitRepoState) {
this.repos[repo] = state;
this.extensionState.saveRepos(this.repos);
}
/**
 * Transfer the repository state from one known repository to another.
 * @param oldRepo The repository to transfer the state from.
 * @param newRepo The repository to transfer the state to.
 */
private transferRepoState(oldRepo: string, newRepo: string) {
this.repos[newRepo] = this.repos[oldRepo];
delete this.repos[oldRepo];
this.updateReposWorkspaceFolderIndex(newRepo);
this.extensionState.saveRepos(this.repos);
this.extensionState.transferRepo(oldRepo, newRepo);
this.logger.log('Transferred repo state: ' + oldRepo + ' -> ' + newRepo);
}
/* Repo Searching */
/**
 * Search all of the current workspace folders for new repositories (and add them).
 * @returns TRUE => At least one repository was added, FALSE => No repositories were added.
 */
public async searchWorkspaceForRepos() {
this.logger.log('Searching workspace for new repos ...');
let rootFolders = vscode.workspace.workspaceFolders, changes = false;
if (typeof rootFolders !== 'undefined') {
for (let i = 0; i < rootFolders.length; i++) {
if (await this.searchDirectoryForRepos(getPathFromUri(rootFolders[i].uri), this.maxDepthOfRepoSearch)) changes = true;
}
}
this.logger.log('Completed searching workspace for new repos');
if (changes) this.sendRepos();
return changes;
}
/**
 * Search the specified directory for new repositories (and add them).
 * @param directory The path of the directory to search.
 * @param maxDepth The maximum depth to recursively search.
 * @returns TRUE => At least one repository was added, FALSE => No repositories were added.
 */
private searchDirectoryForRepos(directory: string, maxDepth: number) {
return new Promise<boolean>(resolve => {
if (this.isDirectoryWithinRepos(directory)) {
// Already inside a known repository — nothing new can be found here.
resolve(false);
return;
}
this.dataSource.repoRoot(directory).then(async (root) => {
if (root !== null) {
resolve(await this.addRepo(root));
} else if (maxDepth > 0) {
fs.readdir(directory, async (err, dirContents) => {
if (err) {
resolve(false);
} else {
let dirs = [];
for (let i = 0; i < dirContents.length; i++) {
if (dirContents[i] !== '.git' && await isDirectory(directory + '/' + dirContents[i])) {
dirs.push(directory + '/' + dirContents[i]);
}
}
// Recurse into subdirectories (two at a time) with reduced depth.
resolve((await evalPromises(dirs, 2, dir => this.searchDirectoryForRepos(dir, maxDepth - 1))).indexOf(true) > -1);
}
});
} else {
resolve(false);
}
}).catch(() => resolve(false));
});
}
/**
 * Check the know repositories for any new submodules (and add them).
 */
private async checkReposForNewSubmodules() {
let repoPaths = Object.keys(this.repos), changes = false;
for (let i = 0; i < repoPaths.length; i++) {
if (await this.searchRepoForSubmodules(repoPaths[i])) changes = true;
}
if (changes) this.sendRepos();
}
/**
 * Search a repository for any new submodules (and add them).
 * @param repo The path of the repository to search.
 * @returns TRUE => At least one submodule was added, FALSE => No submodules were added.
 */
private async searchRepoForSubmodules(repo: string) {
let submodules = await this.dataSource.getSubmodules(repo), changes = false;
for (let i = 0; i < submodules.length; i++) {
if (!this.isKnownRepo(submodules[i])) {
if (await this.addRepo(submodules[i])) changes = true;
}
}
return changes;
}
/* Workspace Folder Watching */
/**
 * Start watching each of the folders in the current workspace for changes.
 */
private startWatchingFolders() {
let rootFolders = vscode.workspace.workspaceFolders;
if (typeof rootFolders !== 'undefined') {
for (let i = 0; i < rootFolders.length; i++) {
this.startWatchingFolder(getPathFromUri(rootFolders[i].uri));
}
}
}
/**
 * Start watching the specified directory for file system events.
 * @param path The path of the directory.
 */
private startWatchingFolder(path: string) {
const watcher = vscode.workspace.createFileSystemWatcher(path + '/**');
watcher.onDidCreate(this.onWatcherCreate.bind(this));
watcher.onDidChange(this.onWatcherChange.bind(this));
watcher.onDidDelete(this.onWatcherDelete.bind(this));
this.folderWatchers[path] = watcher;
}
/**
 * Stop watching the specified directory for file system events.
 * @param path The path of the directory.
 */
private stopWatchingFolder(path: string) {
this.folderWatchers[path].dispose();
delete this.folderWatchers[path];
}
/**
 * Handle a file system creation event.
 * @param uri The URI of the creation event.
 */
private onWatcherCreate(uri: vscode.Uri) {
let path = getPathFromUri(uri);
// Ignore events inside .git directories; map "<x>/.git" to its repository root.
if (path.indexOf('/.git/') > -1) return;
if (path.endsWith('/.git')) path = path.slice(0, -5);
this.onWatcherCreateQueue.enqueue(path);
}
/**
 * Handle a file system change event.
 * @param uri The URI of the change event.
 */
private onWatcherChange(uri: vscode.Uri) {
let path = getPathFromUri(uri);
if (path.indexOf('/.git/') > -1) return;
if (path.endsWith('/.git')) path = path.slice(0, -5);
this.onWatcherChangeQueue.enqueue(path);
}
/**
 * Handle a file system deletion event.
 * @param uri The URI of the deletion event.
 */
private onWatcherDelete(uri: vscode.Uri) {
let path = getPathFromUri(uri);
if (path.indexOf('/.git/') > -1) return;
if (path.endsWith('/.git')) path = path.slice(0, -5);
if (this.removeReposWithinFolder(path)) this.sendRepos();
}
/**
 * Process a file system creation event.
 * @param path The path of the file that was created.
 * @returns TRUE => Change was made. FALSE => No change was made.
 */
private async processOnWatcherCreateEvent(path: string) {
if (await isDirectory(path)) {
if (await this.searchDirectoryForRepos(path, this.maxDepthOfRepoSearch)) {
return true;
}
}
return false;
}
/**
 * Process a file system change event.
 * @param path The path of the file that was changed.
 * @returns TRUE => Change was made. FALSE => No change was made.
 */
private async processOnWatcherChangeEvent(path: string) {
// A change event for a path that no longer exists indicates a deletion.
if (!await doesPathExist(path)) {
if (this.removeReposWithinFolder(path)) {
return true;
}
}
return false;
}
/* Repository Configuration Management */
/**
 * Check the known repositories for new configuration files.
 */
private checkReposForNewConfig() {
Object.keys(this.repos).forEach((repo) => this.checkRepoConfigQueue.enqueue(repo));
}
/**
 * Check to see if the repository has a new configuration file.
 * @param repo The repository to check.
 * @param isRepoNew Is the repository new (was it just added)
 */
private async checkRepoForNewConfig(repo: string, isRepoNew: boolean = false) {
try {
const file = await readExternalConfigFile(repo);
const state = this.repos[repo];
// Only import when the file is newer than the last recorded import.
if (state && file !== null && typeof file.exportedAt === 'number' && file.exportedAt > state.lastImportAt) {
const validationError = validateExternalConfigFile(file);
if (validationError === null) {
const action = isRepoNew ? 'Yes' : await vscode.window.showInformationMessage('A newer Git Graph Repository Configuration File has been detected for the repository "' + (state.name || getRepoName(repo)) + '". Would you like to override your current repository configuration with the new changes?', 'Yes', 'No');
if (this.isKnownRepo(repo) && action) {
// Re-read the state: the repository may have changed while awaiting the user's response.
const state = this.repos[repo];
if (action === 'Yes') {
applyExternalConfigFile(file, state);
}
state.lastImportAt = file.exportedAt;
this.extensionState.saveRepos(this.repos);
if (!isRepoNew && action === 'Yes') {
showInformationMessage('Git Graph Repository Configuration was successfully imported for the repository "' + (state.name || getRepoName(repo)) + '".');
}
return true;
}
} else {
showErrorMessage('The value for "' + validationError + '" in the configuration file "' + getPathFromStr(path.join(repo, '.vscode', 'vscode-git-graph.json')) + '" is invalid.');
}
}
} catch (_) { }
return false;
}
/**
 * Handle a file system create or change event for a configuration file.
 * @param uri The URI of the create or change event.
 */
private onConfigWatcherCreateOrChange(uri: vscode.Uri) {
const path = getPathFromUri(uri);
const repo = this.getRepoContainingFile(path);
if (repo !== null) {
this.checkRepoConfigQueue.enqueue(repo);
}
}
/**
 * Export a repositories configuration.
 * @param repo The path of the repository to export.
 * @returns The ErrorInfo produced when performing this action.
 */
public exportRepoConfig(repo: string): Promise<ErrorInfo> {
const file = generateExternalConfigFile(this.repos[repo]);
return writeExternalConfigFile(repo, file).then((message) => {
showInformationMessage(message);
if (this.isKnownRepo(repo)) {
// Record the export time so the exported file isn't re-imported.
this.repos[repo].lastImportAt = file.exportedAt!;
this.extensionState.saveRepos(this.repos);
}
return null;
}, (error) => error);
}
}
/**
 * Gets the current workspace folders, and generates the information required
 * to identify whether a repository is within any of the workspace folders.
 * @returns The workspace folders plus their exact paths and trailing-slash folder paths.
 */
function getWorkspaceFolderInfoForRepoInclusionMapping() {
	const workspaceFolders = vscode.workspace.workspaceFolders || [];
	const rootsExact: string[] = [];
	const rootsFolder: string[] = [];
	for (const folder of workspaceFolders) {
		const folderPath = getPathFromUri(folder.uri);
		rootsExact.push(folderPath);
		rootsFolder.push(pathWithTrailingSlash(folderPath));
	}
	return {
		workspaceFolders: workspaceFolders,
		rootsExact: rootsExact,
		rootsFolder: rootsFolder
	};
}
/**
 * Check if the specified path is a directory.
 * @param path The path to check.
 * @returns TRUE => Directory, FALSE => Not a directory (or inaccessible).
 */
function isDirectory(path: string) {
	return new Promise<boolean>((resolve) => {
		fs.stat(path, (err, stats) => {
			if (err) {
				// Path doesn't exist, or stat failed: treat as "not a directory".
				resolve(false);
			} else {
				resolve(stats.isDirectory());
			}
		});
	});
}
/**
 * Check if the specified path exists.
 * @param path The path to check.
 * @returns TRUE => Path exists, FALSE => Path doesn't exist.
 */
function doesPathExist(path: string) {
	return new Promise<boolean>((resolve) => {
		// fs.stat errors when the path is absent or inaccessible.
		fs.stat(path, (err) => resolve(err === null));
	});
}
/** External Repo Config File */
export namespace ExternalRepoConfig {
// NOTE(review): these declarations mirror names also imported from './types';
// within this namespace the local declarations take precedence.
// How the file tree is displayed.
export const enum FileViewType {
Tree = 'tree',
List = 'list'
}
// Maps issue references in commit messages to URLs.
export interface IssueLinkingConfig {
readonly issue: string;
readonly url: string;
}
// Supported pull request providers.
export const enum PullRequestProvider {
Bitbucket = 'bitbucket',
Custom = 'custom',
GitHub = 'github',
GitLab = 'gitlab'
}
// Pull request configuration for a built-in provider (no custom template).
interface PullRequestConfigBuiltIn extends PullRequestConfigBase {
readonly provider: Exclude<PullRequestProvider, PullRequestProvider.Custom>;
readonly custom: null;
}
// Pull request configuration for a custom provider (name + URL template).
interface PullRequestConfigCustom extends PullRequestConfigBase {
readonly provider: PullRequestProvider.Custom;
readonly custom: {
readonly name: string,
readonly templateUrl: string
};
}
export type PullRequestConfig = PullRequestConfigBuiltIn | PullRequestConfigCustom;
// The shape of .vscode/vscode-git-graph.json; all fields are optional.
export interface File {
commitOrdering?: RepoCommitOrdering;
fileViewType?: FileViewType;
hideRemotes?: string[];
includeCommitsMentionedByReflogs?: boolean;
issueLinkingConfig?: IssueLinkingConfig;
name?: string | null;
onlyFollowFirstParent?: boolean;
onRepoLoadShowCheckedOutBranch?: boolean;
onRepoLoadShowSpecificBranches?: string[];
pullRequestConfig?: PullRequestConfig;
showRemoteBranches?: boolean;
showStashes?: boolean;
showTags?: boolean;
// Timestamp of when the file was exported; used to detect newer files.
exportedAt?: number;
}
}
/**
 * Reads the External Configuration File for a repository from the File System.
 * @param repo The path of the repository.
 * @returns A promise resolving to the parsed config file, or NULL if the file couldn't be read or parsed.
 */
function readExternalConfigFile(repo: string) {
    return new Promise<Readonly<ExternalRepoConfig.File> | null>((resolve) => {
        const configPath = path.join(repo, '.vscode', 'vscode-git-graph.json');
        fs.readFile(configPath, (err, data) => {
            if (err) {
                // File missing or unreadable.
                resolve(null);
                return;
            }
            let parsed = null;
            try {
                parsed = JSON.parse(data.toString());
            } catch (_) {
                parsed = null;
            }
            // Only object-shaped JSON is a valid configuration file.
            resolve(parsed !== null && typeof parsed === 'object' ? parsed : null);
        });
    });
}
/**
 * Writes the External Configuration File of a repository to the File System.
 * @param repo The path of the repository.
 * @param file The file contents.
 * @returns A promise that resolves to a success message, or rejects to an error message.
 */
function writeExternalConfigFile(repo: string, file: ExternalRepoConfig.File) {
    return new Promise<string>((resolve, reject) => {
        const vscodePath = path.join(repo, '.vscode');
        fs.mkdir(vscodePath, (err) => {
            // EEXIST is fine - the .vscode directory is already present.
            if (err && err.code !== 'EEXIST') {
                reject('An unexpected error occurred while checking if the "' + getPathFromStr(vscodePath) + '" directory exists. This directory is used to store the Git Graph Repository Configuration file.');
                return;
            }
            const configPath = path.join(vscodePath, 'vscode-git-graph.json');
            fs.writeFile(configPath, JSON.stringify(file, null, 4), (writeErr) => {
                if (writeErr) {
                    reject('Failed to write the Git Graph Repository Configuration File to "' + getPathFromStr(configPath) + '".');
                } else {
                    resolve('Successfully exported the Git Graph Repository Configuration to "' + getPathFromStr(configPath) + '".');
                }
            });
        });
    });
}
/**
 * Generate the External Config File's contents from the Git Repositories state.
 * @param state The state being exported.
 * @returns The file contents.
 */
function generateExternalConfigFile(state: GitRepoState): Readonly<ExternalRepoConfig.File> {
    const file: ExternalRepoConfig.File = {};
    // Each field is only written when it differs from its default / unset value,
    // keeping the exported configuration file minimal.
    if (state.commitOrdering !== RepoCommitOrdering.Default) {
        file.commitOrdering = state.commitOrdering;
    }
    if (state.fileViewType !== FileViewType.Default) {
        // Translate the internal enum to the external (string-based) enum.
        switch (state.fileViewType) {
            case FileViewType.Tree:
                file.fileViewType = ExternalRepoConfig.FileViewType.Tree;
                break;
            case FileViewType.List:
                file.fileViewType = ExternalRepoConfig.FileViewType.List;
                break;
        }
    }
    if (state.hideRemotes.length > 0) {
        file.hideRemotes = state.hideRemotes;
    }
    if (state.includeCommitsMentionedByReflogs !== BooleanOverride.Default) {
        file.includeCommitsMentionedByReflogs = state.includeCommitsMentionedByReflogs === BooleanOverride.Enabled;
    }
    if (state.issueLinkingConfig !== null) {
        file.issueLinkingConfig = state.issueLinkingConfig;
    }
    if (state.name !== null) {
        file.name = state.name;
    }
    if (state.onlyFollowFirstParent !== BooleanOverride.Default) {
        file.onlyFollowFirstParent = state.onlyFollowFirstParent === BooleanOverride.Enabled;
    }
    if (state.onRepoLoadShowCheckedOutBranch !== BooleanOverride.Default) {
        file.onRepoLoadShowCheckedOutBranch = state.onRepoLoadShowCheckedOutBranch === BooleanOverride.Enabled;
    }
    if (state.onRepoLoadShowSpecificBranches !== null) {
        file.onRepoLoadShowSpecificBranches = state.onRepoLoadShowSpecificBranches;
    }
    if (state.pullRequestConfig !== null) {
        // Translate the internal provider enum to the external (string-based) enum.
        let provider: ExternalRepoConfig.PullRequestProvider;
        switch (state.pullRequestConfig.provider) {
            case PullRequestProvider.Bitbucket:
                provider = ExternalRepoConfig.PullRequestProvider.Bitbucket;
                break;
            case PullRequestProvider.Custom:
                provider = ExternalRepoConfig.PullRequestProvider.Custom;
                break;
            case PullRequestProvider.GitHub:
                provider = ExternalRepoConfig.PullRequestProvider.GitHub;
                break;
            case PullRequestProvider.GitLab:
                provider = ExternalRepoConfig.PullRequestProvider.GitLab;
                break;
        }
        // Shallow-copy the config, overriding only the translated provider value.
        file.pullRequestConfig = Object.assign({}, state.pullRequestConfig, { provider: provider });
    }
    if (state.showRemoteBranchesV2 !== BooleanOverride.Default) {
        file.showRemoteBranches = state.showRemoteBranchesV2 === BooleanOverride.Enabled;
    }
    if (state.showStashes !== BooleanOverride.Default) {
        file.showStashes = state.showStashes === BooleanOverride.Enabled;
    }
    if (state.showTags !== BooleanOverride.Default) {
        file.showTags = state.showTags === BooleanOverride.Enabled;
    }
    // Record when the export was produced (milliseconds since the UNIX epoch).
    file.exportedAt = (new Date()).getTime();
    return file;
}
/**
 * Validate an external configuration file.
 * @param file The external configuration file.
 * @returns NULL => Valid, String => The first field that is invalid.
 */
function validateExternalConfigFile(file: Readonly<ExternalRepoConfig.File>) {
    // Every field is optional; when a field is present it must have the expected
    // type / value (the file comes from disk, so nothing can be trusted).
    if (typeof file.commitOrdering !== 'undefined' && file.commitOrdering !== RepoCommitOrdering.Date && file.commitOrdering !== RepoCommitOrdering.AuthorDate && file.commitOrdering !== RepoCommitOrdering.Topological) {
        return 'commitOrdering';
    }
    if (typeof file.fileViewType !== 'undefined' && file.fileViewType !== ExternalRepoConfig.FileViewType.Tree && file.fileViewType !== ExternalRepoConfig.FileViewType.List) {
        return 'fileViewType';
    }
    if (typeof file.hideRemotes !== 'undefined' && (!Array.isArray(file.hideRemotes) || file.hideRemotes.some((remote) => typeof remote !== 'string'))) {
        return 'hideRemotes';
    }
    if (typeof file.includeCommitsMentionedByReflogs !== 'undefined' && typeof file.includeCommitsMentionedByReflogs !== 'boolean') {
        return 'includeCommitsMentionedByReflogs';
    }
    if (typeof file.issueLinkingConfig !== 'undefined' && (typeof file.issueLinkingConfig !== 'object' || file.issueLinkingConfig === null || typeof file.issueLinkingConfig.issue !== 'string' || typeof file.issueLinkingConfig.url !== 'string')) {
        return 'issueLinkingConfig';
    }
    if (typeof file.name !== 'undefined' && typeof file.name !== 'string') {
        return 'name';
    }
    if (typeof file.onlyFollowFirstParent !== 'undefined' && typeof file.onlyFollowFirstParent !== 'boolean') {
        return 'onlyFollowFirstParent';
    }
    if (typeof file.onRepoLoadShowCheckedOutBranch !== 'undefined' && typeof file.onRepoLoadShowCheckedOutBranch !== 'boolean') {
        return 'onRepoLoadShowCheckedOutBranch';
    }
    if (typeof file.onRepoLoadShowSpecificBranches !== 'undefined' && (!Array.isArray(file.onRepoLoadShowSpecificBranches) || file.onRepoLoadShowSpecificBranches.some((branch) => typeof branch !== 'string'))) {
        return 'onRepoLoadShowSpecificBranches';
    }
    // pullRequestConfig must be an object with a known provider (Custom additionally
    // requires a valid 'custom' object), plus the common source / destination fields.
    if (typeof file.pullRequestConfig !== 'undefined' && (
        typeof file.pullRequestConfig !== 'object' ||
        file.pullRequestConfig === null ||
        (
            file.pullRequestConfig.provider !== ExternalRepoConfig.PullRequestProvider.Bitbucket &&
            (file.pullRequestConfig.provider !== ExternalRepoConfig.PullRequestProvider.Custom || typeof file.pullRequestConfig.custom !== 'object' || file.pullRequestConfig.custom === null || typeof file.pullRequestConfig.custom.name !== 'string' || typeof file.pullRequestConfig.custom.templateUrl !== 'string') &&
            file.pullRequestConfig.provider !== ExternalRepoConfig.PullRequestProvider.GitHub &&
            file.pullRequestConfig.provider !== ExternalRepoConfig.PullRequestProvider.GitLab
        ) ||
        typeof file.pullRequestConfig.hostRootUrl !== 'string' ||
        typeof file.pullRequestConfig.sourceRemote !== 'string' ||
        typeof file.pullRequestConfig.sourceOwner !== 'string' ||
        typeof file.pullRequestConfig.sourceRepo !== 'string' ||
        (typeof file.pullRequestConfig.destRemote !== 'string' && file.pullRequestConfig.destRemote !== null) ||
        typeof file.pullRequestConfig.destOwner !== 'string' ||
        typeof file.pullRequestConfig.destRepo !== 'string' ||
        typeof file.pullRequestConfig.destProjectId !== 'string' ||
        typeof file.pullRequestConfig.destBranch !== 'string'
    )) {
        return 'pullRequestConfig';
    }
    if (typeof file.showRemoteBranches !== 'undefined' && typeof file.showRemoteBranches !== 'boolean') {
        return 'showRemoteBranches';
    }
    if (typeof file.showStashes !== 'undefined' && typeof file.showStashes !== 'boolean') {
        return 'showStashes';
    }
    if (typeof file.showTags !== 'undefined' && typeof file.showTags !== 'boolean') {
        return 'showTags';
    }
    return null;
}
/**
 * Apply the configuration provided in an external configuration file to a repository state.
 * @param file The file to apply.
 * @param state The state to be updated.
 */
function applyExternalConfigFile(file: Readonly<ExternalRepoConfig.File>, state: GitRepoState) {
    // Only fields present in the file are applied; everything else is left untouched.
    if (typeof file.commitOrdering !== 'undefined') {
        state.commitOrdering = file.commitOrdering;
    }
    if (typeof file.fileViewType !== 'undefined') {
        // Translate the external (string-based) enum to the internal enum.
        switch (file.fileViewType) {
            case ExternalRepoConfig.FileViewType.Tree:
                state.fileViewType = FileViewType.Tree;
                break;
            case ExternalRepoConfig.FileViewType.List:
                state.fileViewType = FileViewType.List;
                break;
        }
    }
    if (typeof file.hideRemotes !== 'undefined') {
        state.hideRemotes = file.hideRemotes;
    }
    if (typeof file.includeCommitsMentionedByReflogs !== 'undefined') {
        state.includeCommitsMentionedByReflogs = file.includeCommitsMentionedByReflogs ? BooleanOverride.Enabled : BooleanOverride.Disabled;
    }
    if (typeof file.issueLinkingConfig !== 'undefined') {
        // Copy only the known fields of the issue linking configuration.
        state.issueLinkingConfig = {
            issue: file.issueLinkingConfig.issue,
            url: file.issueLinkingConfig.url
        };
    }
    if (typeof file.name !== 'undefined') {
        state.name = file.name;
    }
    if (typeof file.onlyFollowFirstParent !== 'undefined') {
        state.onlyFollowFirstParent = file.onlyFollowFirstParent ? BooleanOverride.Enabled : BooleanOverride.Disabled;
    }
    if (typeof file.onRepoLoadShowCheckedOutBranch !== 'undefined') {
        state.onRepoLoadShowCheckedOutBranch = file.onRepoLoadShowCheckedOutBranch ? BooleanOverride.Enabled : BooleanOverride.Disabled;
    }
    if (typeof file.onRepoLoadShowSpecificBranches !== 'undefined') {
        state.onRepoLoadShowSpecificBranches = file.onRepoLoadShowSpecificBranches;
    }
    if (typeof file.pullRequestConfig !== 'undefined') {
        // Translate the external (string-based) provider enum to the internal enum.
        let provider: PullRequestProvider;
        switch (file.pullRequestConfig.provider) {
            case ExternalRepoConfig.PullRequestProvider.Bitbucket:
                provider = PullRequestProvider.Bitbucket;
                break;
            case ExternalRepoConfig.PullRequestProvider.Custom:
                provider = PullRequestProvider.Custom;
                break;
            case ExternalRepoConfig.PullRequestProvider.GitHub:
                provider = PullRequestProvider.GitHub;
                break;
            case ExternalRepoConfig.PullRequestProvider.GitLab:
                provider = PullRequestProvider.GitLab;
                break;
        }
        state.pullRequestConfig = <PullRequestConfig>{
            provider: provider,
            // 'custom' is only populated for the Custom provider; the non-null assertions
            // presumably rely on the file having passed validateExternalConfigFile - TODO confirm.
            custom: provider === PullRequestProvider.Custom
                ? {
                    name: file.pullRequestConfig.custom!.name,
                    templateUrl: file.pullRequestConfig.custom!.templateUrl
                }
                : null,
            hostRootUrl: file.pullRequestConfig.hostRootUrl,
            sourceRemote: file.pullRequestConfig.sourceRemote,
            sourceOwner: file.pullRequestConfig.sourceOwner,
            sourceRepo: file.pullRequestConfig.sourceRepo,
            destRemote: file.pullRequestConfig.destRemote,
            destOwner: file.pullRequestConfig.destOwner,
            destRepo: file.pullRequestConfig.destRepo,
            destProjectId: file.pullRequestConfig.destProjectId,
            destBranch: file.pullRequestConfig.destBranch
        };
    }
    if (typeof file.showRemoteBranches !== 'undefined') {
        state.showRemoteBranchesV2 = file.showRemoteBranches ? BooleanOverride.Enabled : BooleanOverride.Disabled;
    }
    if (typeof file.showStashes !== 'undefined') {
        state.showStashes = file.showStashes ? BooleanOverride.Enabled : BooleanOverride.Disabled;
    }
    if (typeof file.showTags !== 'undefined') {
        state.showTags = file.showTags ? BooleanOverride.Enabled : BooleanOverride.Disabled;
    }
}
import BlocksoftAxios from '../../common/BlocksoftAxios'
import BlocksoftCryptoLog from '../../common/BlocksoftCryptoLog'
import BlocksoftUtils from '../../common/BlocksoftUtils'
import { BlocksoftBlockchainTypes } from '../BlocksoftBlockchainTypes'
import TronUtils from './ext/TronUtils'
import TrxTronscanProvider from './basic/TrxTronscanProvider'
import TrxTrongridProvider from './basic/TrxTrongridProvider'
import TrxSendProvider from '@crypto/blockchains/trx/providers/TrxSendProvider'
import BlocksoftDispatcher from '../BlocksoftDispatcher'
import config from '@app/config/config'
import { strings, sublocale } from '@app/services/i18n'
import settingsActions from '@app/appstores/Stores/Settings/SettingsActions'
import MarketingEvent from '@app/services/Marketing/MarketingEvent'
import BlocksoftTransactions from '@crypto/actions/BlocksoftTransactions/BlocksoftTransactions'
import BlocksoftExternalSettings from '@crypto/common/BlocksoftExternalSettings'
// https://developers.tron.network/docs/parameter-and-return-value-encoding-and-decoding
const ethers = require('ethers')
// Matches the leading '41' of a hex-encoded Tron address, so it can be swapped for the '0x' EVM-style prefix.
const ADDRESS_PREFIX_REGEX = /^(41)/
// ethers.js ABI coder, used to encode smart-contract call parameters for DEX orders.
const AbiCoder = ethers.utils.AbiCoder
export default class TrxTransferProcessor implements BlocksoftBlockchainTypes.TransferProcessor {
private _settings: any
private _tronscanProvider: TrxTronscanProvider
private _trongridProvider: TrxTrongridProvider
private _tokenName: string
private _isToken20: boolean
private sendProvider: TrxSendProvider
constructor(settings: any) {
this._settings = settings
this._tronscanProvider = new TrxTronscanProvider()
this._trongridProvider = new TrxTrongridProvider()
this._tokenName = '_'
this._isToken20 = false
if (typeof settings.tokenName !== 'undefined') {
this._tokenName = settings.tokenName
if (this._tokenName[0] === 'T') {
this._isToken20 = true
}
}
this.sendProvider = new TrxSendProvider(this._settings, 'TRX')
}
needPrivateForFee(): boolean {
return false
}
checkSendAllModal(data: { currencyCode: any }): boolean {
return false
}
async checkTransferHasError(data: BlocksoftBlockchainTypes.CheckTransferHasErrorData): Promise<BlocksoftBlockchainTypes.CheckTransferHasErrorResult> {
// @ts-ignore
if (!this._isToken20 || data.amount && data.amount * 1 > 0) {
return { isOk: true }
}
/**
* @type {TrxScannerProcessor}
*/
const balanceProvider = BlocksoftDispatcher.getScannerProcessor(this._settings.currencyCode)
const balanceRaw = await balanceProvider.getBalanceBlockchain(data.addressTo)
if (balanceRaw && typeof balanceRaw.balance !== 'undefined' && balanceRaw.balance > 0) {
return { isOk: true }
}
const balanceProviderBasic = BlocksoftDispatcher.getScannerProcessor('TRX')
const balanceRawBasic = await balanceProviderBasic.getBalanceBlockchain(data.addressTo)
if (balanceRawBasic && typeof balanceRawBasic.balance !== 'undefined' && balanceRawBasic.balance > 0) {
return { isOk: true }
}
const transactionsBasic = await balanceProviderBasic.getTransactionsBlockchain({ account: { address: data.addressTo } })
if (transactionsBasic !== false) {
return { isOk: true }
}
return { isOk: false, code: 'TRX_20', address: data.addressTo }
}
async getFeeRate(data: BlocksoftBlockchainTypes.TransferData, privateData: BlocksoftBlockchainTypes.TransferPrivateData, additionalData: {} = {}): Promise<BlocksoftBlockchainTypes.FeeRateResult> {
const result: BlocksoftBlockchainTypes.FeeRateResult = {
selectedFeeIndex: -3,
shouldShowFees: false
} as BlocksoftBlockchainTypes.FeeRateResult
const addressHexTo = TronUtils.addressToHex(data.addressTo)
if (TronUtils.addressHexToStr(addressHexTo) !== data.addressTo) {
BlocksoftCryptoLog.log('TrxTransferProcessor.getFeeRate check address ' + data.addressTo + ' hex ' + addressHexTo + ' => ' + TronUtils.addressHexToStr(addressHexTo))
throw new Error('TRX SYSTEM ERROR - Please check address ' + data.addressTo)
}
try {
const sendLink = BlocksoftExternalSettings.getStatic('TRX_SEND_LINK')
const link = sendLink + '/wallet/getaccountresource'
let feeForTx = 0
try {
const res = await BlocksoftAxios.post(link, { address: TronUtils.addressToHex(data.addressFrom) })
const tronData = res.data
delete tronData.assetNetUsed
delete tronData.assetNetLimit
tronData.netRemaining = typeof tronData.NetLimit !== 'undefined' ? (tronData.NetLimit * 1 - tronData.NetUsed * 1) : (tronData.freeNetLimit * 1 - tronData.freeNetUsed * 1)
tronData.energyRemaining = typeof tronData.EnergyLimit !== 'undefined' ? (tronData.EnergyLimit * 1 - tronData.EnergyUsed * 1) : 0
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate result ' + link + ' from ' + data.addressFrom, tronData)
if (this._tokenName[0] === 'T') {
if (tronData.netRemaining <= 0) {
feeForTx = 49000
} else {
const diffB = 350 - tronData.netRemaining
if (diffB > 0) {
feeForTx = BlocksoftUtils.mul(49000, BlocksoftUtils.div(diffB, 350))
}
}
if (tronData.energyRemaining <= 0 ) {
feeForTx = feeForTx * 1 + 8296680
} else {
const diffE = 59262 - tronData.energyRemaining
if (diffE > 0) {
feeForTx = feeForTx * 1 + BlocksoftUtils.mul( 8296680, BlocksoftUtils.div(diffE / 59262)) * 1
}
}
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate feeForTx ' + feeForTx)
} else {
// @ts-ignore
if (tronData.netRemaining <= 0) {
feeForTx = 100000
}
}
} catch (e) {
// do nothing
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate addressFrom data error ' + e.message)
}
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate addressFrom data error ' + e.message)
}
if (typeof data.dexOrderData === 'undefined' || !data.dexOrderData) {
try {
const res2 = await BlocksoftAxios.post(link, { address: addressHexTo })
const tronData2 = res2.data
delete tronData2.assetNetUsed
delete tronData2.assetNetLimit
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate result ' + link + ' to ' + data.addressTo, tronData2)
if (typeof tronData2.freeNetLimit === 'undefined') {
feeForTx = feeForTx * 1 + 1000000
}
} catch (e) {
// do nothing
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate addressTo data error ' + e.message)
}
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate addressTo data error ' + e.message)
}
}
if (feeForTx !== 0) {
result.fees = [
{
langMsg: 'xrp_speed_one',
feeForTx: Math.round(feeForTx).toString(),
amountForTx: data.amount
}
]
/*
if (res.data.balance * 1 < feeForTx * 1) {
throw new Error('SERVER_RESPONSE_BANDWITH_ERROR_TRX')
}
*/
result.selectedFeeIndex = 0
}
} catch (e) {
if (e.message.indexOf('SERVER_RESPONSE_') === 0) {
throw e
}
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate error ' + e.message)
}
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getFeeRate error ' + e.message)
}
return result
}
async getTransferAllBalance(data: BlocksoftBlockchainTypes.TransferData, privateData: BlocksoftBlockchainTypes.TransferPrivateData, additionalData: BlocksoftBlockchainTypes.TransferAdditionalData = {}): Promise<BlocksoftBlockchainTypes.TransferAllBalanceResult> {
const balance = data.amount
// @ts-ignore
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.getTransferAllBalance ', data.addressFrom + ' => ' + balance)
// noinspection EqualityComparisonWithCoercionJS
if (balance === '0') {
return {
selectedTransferAllBalance: '0',
selectedFeeIndex: -1,
fees: [],
shouldShowFees: false,
countedForBasicBalance: '0'
}
}
const fees = await this.getFeeRate(data, privateData, additionalData)
if (!fees || fees.selectedFeeIndex < 0) {
return {
selectedTransferAllBalance: balance,
selectedFeeIndex: -3,
fees: [],
shouldShowFees: false,
countedForBasicBalance: balance
}
}
return {
...fees,
shouldShowFees: false,
selectedTransferAllBalance: fees.fees[fees.selectedFeeIndex].amountForTx
}
}
/**
* https://developers.tron.network/reference#walletcreatetransaction
* https://developers.tron.network/docs/trc20-introduction#section-8usdt-transfer
*/
async sendTx(data: BlocksoftBlockchainTypes.TransferData, privateData: BlocksoftBlockchainTypes.TransferPrivateData, uiData: BlocksoftBlockchainTypes.TransferUiData): Promise<BlocksoftBlockchainTypes.SendTxResult> {
if (typeof privateData.privateKey === 'undefined') {
throw new Error('TRX transaction required privateKey')
}
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx started ' + data.addressFrom + ' => ' + data.addressTo)
const logData = {}
logData.currencyCode = this._settings.currencyCode
logData.selectedFee = uiData.selectedFee
logData.from = data.addressFrom
logData.basicAddressTo = data.addressTo
logData.basicAmount = data.amount
logData.pushLocale = sublocale()
logData.pushSetting = await settingsActions.getSetting('transactionsNotifs')
logData.basicToken = this._tokenName
const sendLink = BlocksoftExternalSettings.getStatic('TRX_SEND_LINK')
let tx
if (typeof data.blockchainData !== 'undefined' && data.blockchainData) {
tx = data.blockchainData
} else {
let link, res, params
if (typeof data.dexOrderData !== 'undefined' && data.dexOrderData) {
// {"tokenContract":"41a2726afbecbd8e936000ed684cef5e2f5cf43008","contractMethod":"trxToTokenSwapInput(uint256)","options":{"callValue":"1000000"},"params":[{"type":"uint256","value":"116256"}]}
let ownerAddress
const abiCoder = new AbiCoder()
try {
ownerAddress = TronUtils.addressToHex(data.addressFrom)
} catch (e) {
e.message += ' inside TronUtils.addressToHex owner_address ' + data.addressFrom
throw e
}
const link = sendLink + '/wallet/triggersmartcontract'
const total = data.dexOrderData.length
let index = 0
for (const order of data.dexOrderData) {
index++
let parameter = ''
if (order.params) {
const types = []
const values = []
try {
for (const tmp of order.params) {
let type, value
try {
type = tmp.type
value = tmp.value
if (type === 'address') {
value = TronUtils.addressToHex(value).replace(ADDRESS_PREFIX_REGEX, '0x')
} else if (type === 'address[]') {
value = value.map(v => TronUtils.addressToHex(v).replace(ADDRESS_PREFIX_REGEX, '0x'))
}
types.push(type)
values.push(value)
} catch (e) {
throw new Error(e.message + ' type ' + type + ' tmp.value ' + tmp.value + ' value ' + value)
}
}
parameter = abiCoder.encode(types, values).replace(/^(0x)/, '')
} catch (e) {
throw new Error(e.message + ' in abiCoder')
}
}
let params
try {
params = {
owner_address: ownerAddress,
contract_address: order.tokenContract,
function_selector: order.contractMethod,
// @ts-ignore
parameter,
fee_limit: 100000000
}
if (typeof order.options !== 'undefined' && typeof order.options.callValue !== 'undefined') {
params.call_value = order.options.callValue * 1
}
} catch (e1) {
throw new Error(e1.message + ' in params build')
}
if (index < total) {
res = await BlocksoftAxios.post(link, params)
tx = res.data.transaction
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTxProcessor.sendSubTx tx', tx)
tx.signature = [TronUtils.ECKeySign(Buffer.from(tx.txID, 'hex'), Buffer.from(privateData.privateKey, 'hex'))]
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTxProcessor.sendSubTx signed', tx)
let resultSub = {} as BlocksoftBlockchainTypes.SendTxResult
try {
resultSub = await this.sendProvider.sendTx(tx, '', false, logData)
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTxProcessor.sendSubTx broadcasted')
} catch (e) {
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TrxTransferProcessor.sendSubTx error', e, uiData)
}
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendSubTx error ' + e.message)
// noinspection ES6MissingAwait
MarketingEvent.logOnlyRealTime('v20_trx_tx_sub_error ' + this._settings.currencyCode + ' ' + data.addressFrom + ' => ' + data.addressTo + ' ' + e.message, logData)
throw e
}
const linkRecheck = sendLink + '/wallet/gettransactioninfobyid'
let checks = 0
let mined = false
do {
checks++
try {
const recheck = await BlocksoftAxios.post(linkRecheck, {
value: tx.txID
})
if (typeof recheck.data !== 'undefined') {
if (typeof recheck.data.id !== 'undefined' && typeof recheck.data.blockNumber !== 'undefined'
&& typeof recheck.data.receipt !== 'undefined' && typeof recheck.data.receipt.result !== 'undefined'
) {
// @ts-ignore
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendSubTx recheck ', {
id: recheck.data.id,
blockNumber: recheck.data.blockNumber,
receipt: recheck.data.receipt
})
mined = true
const minedStatus = recheck.data.receipt.result.toUpperCase()
if (minedStatus === 'OUT_OF_ENERGY') {
strings(`account.transactionStatuses.out_of_energy`)
} else if (minedStatus === 'FAILED') {
strings(`account.transactionStatuses.fail`)
} else if (minedStatus !== 'SUCCESS') {
throw new Error('Bad tx status ' + JSON.stringify(recheck.data.receipt))
}
break
}
}
} catch (e1) {
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TRX transaction recheck error ', e1)
}
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TRX transaction recheck error ' + e1.message)
}
} while (checks < 100 && !mined)
} else {
res = await BlocksoftAxios.post(link, params)
}
}
} else {
if (typeof data.addressTo === 'undefined') {
throw new Error('TRX transaction required addressTo')
}
if (data.addressFrom === data.addressTo) {
throw new Error('SERVER_RESPONSE_SELF_TX_FORBIDDEN')
}
// check error
await this.getFeeRate(data, privateData)
let toAddress, ownerAddress
try {
toAddress = TronUtils.addressToHex(data.addressTo)
} catch (e) {
e.message += ' inside TronUtils.addressToHex to_address ' + data.addressTo
throw e
}
if (TronUtils.addressHexToStr(toAddress) !== data.addressTo) {
BlocksoftCryptoLog.log('TrxTransferProcessor.sendTx heck address ' + data.addressTo + ' hex ' + toAddress + ' => ' + TronUtils.addressHexToStr(toAddress))
throw new Error('TRX SYSTEM ERROR - Please check address ' + data.addressTo)
}
try {
ownerAddress = TronUtils.addressToHex(data.addressFrom)
} catch (e) {
e.message += ' inside TronUtils.addressToHex owner_address ' + data.addressFrom
throw e
}
if (this._tokenName[0] === 'T') {
link = sendLink + '/wallet/triggersmartcontract'
const parameter = '0000000000000000000000' + toAddress.toUpperCase() + '00000000000000000000000000000000000000000000' + BlocksoftUtils.decimalToHex(BlocksoftUtils.round(data.amount), 20)
params = {
owner_address: ownerAddress,
contract_address: TronUtils.addressToHex(this._tokenName),
function_selector: 'transfer(address,uint256)',
parameter,
fee_limit: 100000000,
call_value: 0
}
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx inited1' + data.addressFrom + ' => ' + data.addressTo + ' ' + link, params)
res = await BlocksoftAxios.post(link, params)
} else {
params = {
owner_address: ownerAddress,
to_address: toAddress,
// @ts-ignore
amount: BlocksoftUtils.round(data.amount) * 1
}
if (this._tokenName === '_') {
link = sendLink + '/wallet/createtransaction'
} else {
// @ts-ignore
params.asset_name = '0x' + Buffer.from(this._tokenName).toString('hex')
link = sendLink + '/wallet/transferasset'
}
try {
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx inited2 ' + data.addressFrom + ' => ' + data.addressTo + ' ' + link, params)
res = await BlocksoftAxios.post(link, params)
} catch (e) {
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx result2' + data.addressFrom + ' => ' + data.addressTo + ' ' + link + ' ' + e.message)
if (e.message.indexOf('timeout of') !== -1 || e.message.indexOf('network') !== -1) {
throw new Error('SERVER_RESPONSE_NOT_CONNECTED')
} else {
throw e
}
}
}
}
// @ts-ignore
if (typeof res.data.Error !== 'undefined') {
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx error ' + data.addressFrom + ' => ' + data.addressTo + ' ', res.data)
// @ts-ignore
this.sendProvider.trxError(res.data.Error.message || res.data.Error)
}
// @ts-ignore
tx = res.data
if ((typeof data.dexOrderData !== 'undefined' && data.dexOrderData) || (this._tokenName[0] === 'T')) {
// @ts-ignore
if (typeof res.data.transaction === 'undefined' || typeof res.data.result === 'undefined') {
// @ts-ignore
if (typeof res.data.result.message !== 'undefined') {
// @ts-ignore
res.data.result.message = BlocksoftUtils.hexToUtf('0x' + res.data.result.message)
}
// @ts-ignore
this.sendProvider.trxError('No tx in contract data ' + JSON.stringify(res.data))
}
// @ts-ignore
tx = res.data.transaction
} else {
// @ts-ignore
if (typeof res.data.txID === 'undefined') {
// @ts-ignore
if (typeof res.data.result.message !== 'undefined') {
// @ts-ignore
res.data.result.message = BlocksoftUtils.hexToUtf('0x' + res.data.result.message)
}
// @ts-ignore
this.sendProvider.trxError('No txID in data ' + JSON.stringify(res.data))
}
}
}
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTxProcessor.sendTx token ' + this._tokenName + ' tx', tx)
tx.signature = [TronUtils.ECKeySign(Buffer.from(tx.txID, 'hex'), Buffer.from(privateData.privateKey, 'hex'))]
if (typeof uiData !== 'undefined' && typeof uiData.selectedFee !== 'undefined' && typeof uiData.selectedFee.rawOnly !== 'undefined' && uiData.selectedFee.rawOnly) {
return { rawOnly: uiData.selectedFee.rawOnly, raw : JSON.stringify(tx)}
}
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTxProcessor.sendTx signed', tx)
let result = {} as BlocksoftBlockchainTypes.SendTxResult
try {
result = await this.sendProvider.sendTx(tx, '', false, logData)
await BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTxProcessor.sendTx broadcasted')
} catch (e) {
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx error', e, uiData)
}
BlocksoftCryptoLog.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx error ' + e.message)
// noinspection ES6MissingAwait
MarketingEvent.logOnlyRealTime('v20_trx_tx_error ' + this._settings.currencyCode + ' ' + data.addressFrom + ' => ' + data.addressTo + ' ' + e.message, logData)
throw e
}
// noinspection ES6MissingAwait
MarketingEvent.logOnlyRealTime('v20_trx_tx_success ' + this._settings.currencyCode + ' ' + data.addressFrom + ' => ' + data.addressTo, logData)
await (BlocksoftTransactions.resetTransactionsPending({ account: { currencyCode: 'TRX' } }, 'AccountRunPending'))
if (config.debug.cryptoErrors) {
console.log(this._settings.currencyCode + ' TrxTransferProcessor.sendTx result', JSON.parse(JSON.stringify(result)))
}
return result
}
} | the_stack |
import React, {useMemo, useState} from 'react'
import {Helmet} from 'react-helmet'
import {Steps, Row, Button, Card, Select, Typography, Checkbox, Modal, Divider, Spin} from 'antd';
import {CheckCircleOutlined} from "@ant-design/icons";
import {useApplication} from "../context/ApplicationProvider";
import {ControlService} from "../api/control";
import {handleAPIError, handleAPIResponse} from "../utils/errors";
import {badgedOption} from "../components/tags/BadgedOption";
import {TaskState} from "../components/tags/TaskState";
import {MetricsService} from "../api/metrics";
// Destructure commonly-used antd sub-components.
const {Step} = Steps;
const Option = Select.Option;
const {confirm} = Modal;
// A full-width, centered small spinner element.
const loadingIndicator = <Row justify="center" align="middle" style={{width: "100%"}}><Spin size="small"/></Row>;
/**
 * Wizard-style page for broadcasting control commands (currently only task
 * revocation) to the workers of the selected application/environment.
 *
 * Steps: 0 = choose command, 1 = configure its arguments, 2 = confirm and
 * broadcast. Moving from step 1 to step 2 performs a dry run so that the
 * confirmation screen can display how many task instances would be revoked.
 */
const ControlPage = () => {
    const [current, setCurrent] = useState(0);
    const [command, setCommand] = useState<string>("revoke");
    const {currentApp, currentEnv} = useApplication();
    // Memoized so the API clients are not re-instantiated on every render.
    const service = useMemo(() => new ControlService(), []);
    const metricsService = useMemo(() => new MetricsService(), []);
    const [broadcasting, setBroadcasting] = useState<boolean>();
    const [seenTasks, setSeenTasks] = useState([]);
    const [taskName, setTaskName] = useState<string>();
    const [terminate, setTerminate] = useState<boolean>(false);
    const [signal, setSignal] = useState<string>("SIGTERM");
    const [revocationCount, setRevocationCount] = useState<number>(0);
    const [seenTasksFetching, setSeenTasksFetching] = useState<boolean>();

    /** Advance the wizard; entering the confirmation step triggers a dry run. */
    const next = () => {
        if (command === "revoke" && current === 1)
            revoke("true").then(() => {
                setCurrent(current + 1);
            })
        else
            setCurrent(current + 1);
    };

    const prev = () => {
        setCurrent(current - 1);
    };

    const memoizedTaskNameOptions = useMemo(() => {
        // memoize this because it's common to have many different task names, which causes the dropdown to be very laggy.
        // This is a known problem in Ant Design
        return seenTasks.map((task) => badgedOption(task))
    }, [seenTasks])

    /**
     * Broadcast the revoke command. The backend expects dry_run as the
     * strings "true"/"false"; a dry run only computes the revocation count.
     * Always returns a promise so callers can safely chain `.then()` even
     * when the preconditions are not met (the original returned undefined
     * in that case, which crashed `next()`).
     */
    function revoke(dry_run: string) {
        if (!currentApp || !currentEnv || !taskName) return Promise.resolve();
        setBroadcasting(true);
        return service.revokeTasksByName(currentApp, currentEnv, taskName, terminate, signal, dry_run)
            .then(handleAPIResponse)
            .then((result: any) => {
                setRevocationCount(result.revocation_count);
                if (dry_run !== "true") {
                    setCurrent(0)
                    pendingRevocation(result)
                }
            }, handleAPIError)
            .catch(handleAPIError)
            .finally(() => setBroadcasting(false));
    }

    /** Dispatch the currently selected command for real (non-dry-run). */
    function broadcastCommand() {
        if (command === "revoke")
            revoke("false")
    }

    /** Confirmation modal shown after the revocation command has been queued. */
    function pendingRevocation(result: any) {
        confirm({
            title: "Tasks pending revocation!",
            icon: <CheckCircleOutlined style={{color: "#00BFA6"}}/>,
            content: <>
                <Typography.Paragraph>Revocation command queued
                    for {result.revocation_count} tasks!</Typography.Paragraph>
            </>,
            okText: "Ok",
            cancelButtonProps: {style: {display: 'none'}}
        });
    }

    /**
     * Lazily fetch the list of seen task names when the dropdown opens.
     * @param open antd's onDropdownVisibleChange flag; only fetch on open.
     */
    function getSeenTasks(open: boolean) {
        if (!currentApp || !open) return;
        setSeenTasksFetching(true);
        metricsService.getSeenTasks(currentApp, currentEnv, {})
            .then(handleAPIResponse)
            .then((result: any) => {
                setSeenTasks(result.aggregations.seen_tasks.buckets);
            }, handleAPIError)
            .catch(handleAPIError)
            .finally(() => setSeenTasksFetching(false));
    }

    return (
        <>
            <Helmet
                title="Control"
                meta={[
                    {name: 'description', content: 'Control commands'},
                    {name: 'keywords', content: 'celery, tasks'},
                ]}
            >
                <html lang="en"/>
            </Helmet>
            {/* Steps */}
            <Row style={{marginTop: 20}}>
                <Card style={{width: "100%"}}>
                    <Steps current={current}>
                        <Step title="Command" description="Choose command"/>
                        <Step title="Setup" description="Setup command args"/>
                        <Step title="Broadcast" description="Broadcast command"/>
                    </Steps>
                </Card>
            </Row>
            {/* Tabs Containers */}
            <Row style={{marginTop: 20, marginBottom: 20}}>
                <Card style={{width: "100%", alignItems: "center"}}>
                    {current === 0 && (
                        <Row justify="center" style={{width: "100%"}}>
                            <Row style={{width: "100%"}} justify="center">
                                <Typography.Title level={5}>
                                    What control command you want to broadcast?
                                </Typography.Title>
                            </Row>
                            <Row style={{width: "100%"}} justify="center">
                                {/* width must be a number (px); the string "200" is not valid CSS */}
                                <Select style={{width: 200}} defaultValue="revoke"
                                        onSelect={value => setCommand(value)}>
                                    <Option value="revoke">Revoke</Option>
                                </Select>
                            </Row>
                        </Row>
                    )}
                    {current === 1 && command === "revoke" && (
                        <Row justify="center" style={{width: "100%"}}>
                            <Typography.Paragraph>
                                Revoking tasks works by sending a broadcast message to all the workers, the workers then
                                keep a list of revoked tasks in memory. When a worker receives a task in the list, it
                                will skip executing the task.
                            </Typography.Paragraph>
                            <Select placeholder="Task name"
                                    style={{width: "100%"}}
                                    allowClear
                                    showSearch
                                    dropdownMatchSelectWidth={false}
                                    onDropdownVisibleChange={getSeenTasks}
                                    notFoundContent={seenTasksFetching ? loadingIndicator : null}
                                    // @ts-ignore
                                    onSelect={value => setTaskName(value)}
                            >
                                {memoizedTaskNameOptions}
                            </Select>
                            <Row align="middle" style={{marginTop: 16, width: "100%"}}>
                                <Checkbox onChange={e => setTerminate(e.target.checked)}> Terminate already started
                                    tasks with</Checkbox>
                                <Select style={{width: 90}}
                                        // @ts-ignore
                                        onSelect={value => setSignal(value)}
                                        defaultValue="SIGTERM"
                                >
                                    <Option value="SIGTERM">SIGTERM</Option>
                                    <Option value="SIGKILL">SIGKILL</Option>
                                </Select>
                            </Row>
                            <Row justify="start" style={{width: "100%", marginTop: 10}}>
                                <Typography.Paragraph type="secondary">
                                    The worker won’t terminate an already executing task unless the terminate option
                                    is set.
                                </Typography.Paragraph>
                            </Row>
                            <Divider/>
                            <Row justify="start" style={{width: "100%"}}>
                                <Typography.Text type="secondary">
                                    <Typography.Text strong type="warning">Caveats:</Typography.Text>
                                    <ul>
                                        <li>
                                            When a worker starts up it will synchronize revoked tasks with other workers
                                            in the cluster unless you have disabled synchronization using worker arg
                                            <Typography.Text code>--without-mingle</Typography.Text>.
                                        </li>
                                        <li>
                                            If The list of revoked tasks is in-memory and if all workers restart the
                                            list of revoked ids will also vanish. If you want to preserve this list
                                            between restarts you need to specify a file for these to be stored in by
                                            using the <Typography.Text code>–statedb</Typography.Text> argument to
                                            celery worker.
                                        </li>
                                    </ul>
                                </Typography.Text>
                            </Row>
                        </Row>
                    )}
                    {current === 2 && command === "revoke" && (
                        <>
                            <Row justify="center" style={{width: "100%"}}>
                                <Typography.Paragraph>
                                    Found <Typography.Text code>{revocationCount}</Typography.Text> pending ( <TaskState
                                    state="QUEUED"/> <TaskState state="RECEIVED"/> <TaskState state="STARTED"/>)
                                    instances of
                                    task <Typography.Text code>{taskName}</Typography.Text>.
                                    Are you sure you want to revoke them all?
                                </Typography.Paragraph>
                            </Row>
                            {terminate &&
                            <Row justify="center" style={{width: "100%"}}>
                                <Typography.Paragraph type="secondary">
                                    If an instance is already <TaskState state="STARTED"/> it will be terminated
                                    using <Typography.Text
                                    code>{signal}</Typography.Text> signal!
                                </Typography.Paragraph>
                            </Row>
                            }
                        </>
                    )}
                </Card>
            </Row>
            {/* Controls */}
            <Row justify="end">
                {current > 0 && (
                    <Button style={{margin: '0 8px'}} onClick={() => prev()}>
                        Previous
                    </Button>
                )}
                {current < 2 && (
                    <Button type="primary" onClick={() => next()}>
                        Next
                    </Button>
                )}
                {current === 2 && (
                    <Button type="primary" onClick={broadcastCommand} loading={broadcasting}>
                        Broadcast
                    </Button>
                )}
            </Row>
        </>
    )
};
export default ControlPage
import { expect } from 'chai';
import { deployMockContract } from 'ethereum-waffle';
import { describeFilter } from '@solidstate/library';
import { describeBehaviorOfERC165 } from '../../introspection';
import { DiamondCuttable } from '../../../typechain';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { ethers } from 'hardhat';
/**
 * Dependencies the caller of describeBehaviorOfDiamondCuttable must supply.
 */
interface DiamondCuttableBehaviorArgs {
  // Deploys a fresh DiamondCuttable instance for each test.
  deploy: () => Promise<DiamondCuttable>;
  // Signer that is allowed to call diamondCut.
  getOwner: () => Promise<SignerWithAddress>;
  // Signer expected to be rejected by diamondCut's ownership check.
  getNonOwner: () => Promise<SignerWithAddress>;
}
/**
 * Reusable behavior test suite for any contract implementing the
 * DiamondCuttable (`diamondCut`) interface.
 *
 * @param deploy - factory returning a fresh instance under test
 * @param getOwner - signer authorized to perform cuts
 * @param getNonOwner - signer expected to be rejected
 * @param skips - names of sub-suites to skip (consumed by describeFilter)
 */
export function describeBehaviorOfDiamondCuttable(
  { deploy, getOwner, getNonOwner }: DiamondCuttableBehaviorArgs,
  skips?: string[],
) {
  // describeFilter returns a describe() wrapper that honors the skip list.
  const describe = describeFilter(skips);
  describe('::DiamondCuttable', function () {
    let owner: SignerWithAddress;
    let nonOwner: SignerWithAddress;
    // Synthetic function signatures and their 4-byte selectors, built once in `before`.
    const functions: string[] = [];
    const selectors: string[] = [];
    let abi: any;
    let facet: any;
    let instance: DiamondCuttable;
    before(async function () {
      owner = await getOwner();
      nonOwner = await getNonOwner();
      // Generate 24 dummy functions fn0()..fn23(); each selector is the
      // first 4 bytes of keccak256 of the function signature.
      for (let i = 0; i < 24; i++) {
        const fn = `fn${i}()`;
        functions.push(fn);
        selectors.push(
          ethers.utils.hexDataSlice(
            ethers.utils.solidityKeccak256(['string'], [fn]),
            0,
            4,
          ),
        );
      }
      // Deploy a mock facet exposing all the generated functions.
      abi = functions.map((fn) => `function ${fn}`);
      facet = await deployMockContract(owner, abi);
    });
    beforeEach(async function () {
      // Fresh instance per test so cuts don't leak between tests.
      instance = await deploy();
    });
    // The diamond must advertise the IDiamondCuttable interface id via ERC165.
    describeBehaviorOfERC165(
      {
        deploy: deploy as any,
        interfaceIds: ['0x1f931c1c'],
      },
      skips,
    );
    describe('#diamondCut', function () {
      it('emits DiamondCut event', async function () {
        const facets: any = [
          {
            target: facet.address,
            action: 0,
            selectors: [ethers.utils.hexlify(ethers.utils.randomBytes(4))],
          },
        ];
        const target = ethers.constants.AddressZero;
        const data = '0x';
        let tx = instance.connect(owner).diamondCut(facets, target, data);
        // Inspect the emitted event args directly rather than using the
        // emit matcher, so nested struct fields can be compared.
        const events = (await (await tx).wait()).events;
        const argsResult: any = events![0].args!;
        expect(argsResult.facetCuts[0].target).to.eq(facets[0].target);
        expect(argsResult.facetCuts[0].action).to.eq(facets[0].action);
        expect(argsResult.facetCuts[0].selectors).to.deep.eq(
          facets[0].selectors,
        );
        expect(argsResult.target).to.eq(target);
        expect(argsResult.data).to.eq(data);
      });
      // action 0 = ADD (see 'DiamondBase: ADD ...' revert messages below)
      describe('using FacetCutAction ADD', function () {
        it('adds facet', async function () {
          // Call the diamond through the mock facet's ABI.
          const contract = new ethers.Contract(
            instance.address,
            abi,
            ethers.provider,
          );
          // Before the cut, no selector routes anywhere.
          for (let fn of functions) {
            await expect(contract.callStatic[fn]()).to.be.revertedWith(
              'DiamondBase: no facet found for function signature',
            );
          }
          await instance
            .connect(owner)
            .diamondCut(
              [{ target: facet.address, action: 0, selectors }],
              ethers.constants.AddressZero,
              '0x',
            );
          for (let fn of functions) {
            // call reverts, but with mock-specific message
            await expect(contract.callStatic[fn]()).to.be.revertedWith(
              'Mock on the method is not initialized',
            );
          }
        });
        describe('reverts if', function () {
          it('target facet is not a contract', async function () {
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: ethers.constants.AddressZero,
                    action: 0,
                    selectors: [ethers.utils.randomBytes(4)],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: ADD target has no code');
          });
          it('selector has already been added', async function () {
            const facetCuts = [
              {
                target: facet.address,
                action: 0,
                selectors: [ethers.utils.randomBytes(4)],
              },
            ];
            // First ADD succeeds; repeating the identical cut must revert.
            await instance
              .connect(owner)
              .diamondCut(facetCuts, ethers.constants.AddressZero, '0x');
            await expect(
              instance
                .connect(owner)
                .diamondCut(facetCuts, ethers.constants.AddressZero, '0x'),
            ).to.be.revertedWith('DiamondBase: selector already added');
          });
        });
      });
      // action 1 = REPLACE
      describe('using FacetCutAction REPLACE', function () {
        it('replaces facet', async function () {
          const contract = new ethers.Contract(
            instance.address,
            abi,
            ethers.provider,
          );
          // ADD the original mock facet first.
          await instance
            .connect(owner)
            .diamondCut(
              [{ target: facet.address, action: 0, selectors }],
              ethers.constants.AddressZero,
              '0x',
            );
          for (let fn of functions) {
            // call reverts, but with mock-specific message
            await expect(contract.callStatic[fn]()).to.be.revertedWith(
              'Mock on the method is not initialized',
            );
          }
          // Deploy a second mock with the same ABI and REPLACE with it.
          const facetReplacement = await deployMockContract(owner, abi);
          for (let fn of functions) {
            expect(facetReplacement[fn]).not.to.be.undefined;
          }
          await instance
            .connect(owner)
            .diamondCut(
              [{ target: facetReplacement.address, action: 1, selectors }],
              ethers.constants.AddressZero,
              '0x',
            );
          for (let fn of functions) {
            // call reverts, but with mock-specific message
            await expect(contract.callStatic[fn]()).to.be.revertedWith(
              'Mock on the method is not initialized',
            );
          }
        });
        describe('reverts if', function () {
          it('target facet is not a contract', async function () {
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: ethers.constants.AddressZero,
                    action: 1,
                    selectors: [ethers.utils.randomBytes(4)],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: REPLACE target has no code');
          });
          it('selector has not been added', async function () {
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: facet.address,
                    action: 1,
                    selectors: [ethers.utils.randomBytes(4)],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: selector not found');
          });
          it('selector is immutable', async function () {
            // A selector whose facet is the diamond itself is immutable
            // (per the 'DiamondBase: selector is immutable' revert below).
            const selector = ethers.utils.randomBytes(4);
            await instance.connect(owner).diamondCut(
              [
                {
                  target: instance.address,
                  action: 0,
                  selectors: [selector],
                },
              ],
              ethers.constants.AddressZero,
              '0x',
            );
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: facet.address,
                    action: 1,
                    selectors: [selector],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: selector is immutable');
          });
          it('replacement facet is same as existing facet', async function () {
            const selector = ethers.utils.randomBytes(4);
            await instance.connect(owner).diamondCut(
              [
                {
                  target: facet.address,
                  action: 0,
                  selectors: [selector],
                },
              ],
              ethers.constants.AddressZero,
              '0x',
            );
            // Replacing a selector with the facet it already points to must fail.
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: facet.address,
                    action: 1,
                    selectors: [selector],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: REPLACE target is identical');
          });
        });
      });
      // action 2 = REMOVE (target must be the zero address)
      describe('using FacetCutAction REMOVE', function () {
        it('removes facet', async function () {
          const contract = new ethers.Contract(
            instance.address,
            abi,
            ethers.provider,
          );
          await instance
            .connect(owner)
            .diamondCut(
              [{ target: facet.address, action: 0, selectors }],
              ethers.constants.AddressZero,
              '0x',
            );
          for (let fn of functions) {
            // call reverts, but with mock-specific message
            await expect(contract.callStatic[fn]()).to.be.revertedWith(
              'Mock on the method is not initialized',
            );
          }
          await instance
            .connect(owner)
            .diamondCut(
              [{ target: ethers.constants.AddressZero, action: 2, selectors }],
              ethers.constants.AddressZero,
              '0x',
            );
          // After removal, the selectors no longer route to any facet.
          for (let fn of functions) {
            await expect(contract.callStatic[fn]()).to.be.revertedWith(
              'DiamondBase: no facet found for function signature',
            );
          }
        });
        describe('reverts if', function () {
          it('target address is not zero address', async function () {
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: instance.address,
                    action: 2,
                    selectors: [ethers.utils.randomBytes(4)],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith(
              'DiamondBase: REMOVE target must be zero address',
            );
          });
          it('selector has not been added', async function () {
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: ethers.constants.AddressZero,
                    action: 2,
                    selectors: [ethers.utils.randomBytes(4)],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: selector not found');
          });
          it('selector is immutable', async function () {
            const selector = ethers.utils.randomBytes(4);
            await instance.connect(owner).diamondCut(
              [
                {
                  target: instance.address,
                  action: 0,
                  selectors: [selector],
                },
              ],
              ethers.constants.AddressZero,
              '0x',
            );
            await expect(
              instance.connect(owner).diamondCut(
                [
                  {
                    target: ethers.constants.AddressZero,
                    action: 2,
                    selectors: [selector],
                  },
                ],
                ethers.constants.AddressZero,
                '0x',
              ),
            ).to.be.revertedWith('DiamondBase: selector is immutable');
          });
        });
      });
      // Failure modes independent of the cut action.
      describe('reverts if', function () {
        it('sender is not owner', async function () {
          await expect(
            instance
              .connect(nonOwner)
              .diamondCut([], ethers.constants.AddressZero, '0x'),
          ).to.be.revertedWith('Ownable: sender must be owner');
        });
        it('passed FacetCutAction is invalid', async function () {
          // action 3 is outside the enum range; the revert reason cannot be
          // decoded, hence the Hardhat inference message.
          await expect(
            instance.connect(owner).diamondCut(
              [
                {
                  target: ethers.constants.AddressZero,
                  action: 3,
                  selectors: [],
                },
              ],
              ethers.constants.AddressZero,
              '0x',
            ),
          ).to.be.revertedWith(
            "Hardhat couldn't infer the reason. Please report this to help us improve Hardhat.",
          );
        });
        it('passed selector array is empty', async function () {
          await expect(
            instance.connect(owner).diamondCut(
              [
                {
                  target: ethers.constants.AddressZero,
                  action: 0,
                  selectors: [],
                },
              ],
              ethers.constants.AddressZero,
              '0x',
            ),
          ).to.be.revertedWith('DiamondBase: no selectors specified');
        });
        // Initialization target and data must be provided together.
        it('initialization target is provided but data is not', async function () {
          await expect(
            instance.connect(owner).diamondCut([], facet.address, '0x'),
          ).to.be.revertedWith(
            'DiamondBase: invalid initialization parameters',
          );
        });
        it('initialization data is provided but target is not', async function () {
          await expect(
            instance
              .connect(owner)
              .diamondCut([], ethers.constants.AddressZero, '0x01'),
          ).to.be.revertedWith(
            'DiamondBase: invalid initialization parameters',
          );
        });
        it('initialization target has no code', async function () {
          // owner.address is an EOA, so it cannot be delegatecalled into.
          await expect(
            instance.connect(owner).diamondCut([], owner.address, '0x01'),
          ).to.be.revertedWith(
            'DiamondBase: initialization target has no code',
          );
        });
        it('initialization function reverts', async function () {
          // The mock facet reverts on any uninitialized method call.
          await expect(
            instance.connect(owner).diamondCut([], facet.address, '0x01'),
          ).to.be.revertedWith('Mock on the method is not initialized');
        });
      });
    });
  });
}
import {
CdmAttributeItem,
CdmCorpusDefinition,
CdmEntityDefinition,
CdmTraitReference,
CdmTypeAttributeDefinition
} from '../../../internal';
import { testHelper } from '../../testHelper';
import { projectionTestUtils } from '../../Utilities/projectionTestUtils';
/**
* A test class for testing the AddSupportingAttribute operation in a projection and in a resolution guidance
*/
describe('Cdm/Projection/ProjectionAddSupportingAttributeTest', (): void => {
/**
* All possible combinations of the different resolution directives
*/
const resOptsCombinations: string[][] = [
[],
['referenceOnly'],
['normalized'],
['structured'],
['referenceOnly', 'normalized'],
['referenceOnly', 'virtual'],
['referenceOnly', 'structured'],
['normalized', 'structured'],
['normalized', 'structured', 'virtual'],
['referenceOnly', 'normalized', 'structured'],
['referenceOnly', 'normalized', 'structured', 'virtual']
];
/**
* The path between TestDataPath and TestName.
*/
const testsSubpath: string = 'Cdm/Projection/ProjectionAddSupportingAttributeTest';
/**
* AddSupportingAttribute with replaceAsForeignKey operation in the same projection
*/
it('testCombineOpsProj', async () => {
const testName: string = 'testCombineOpsProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, []);
//// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
// Supporting attribute: 'PersonInfo_display', rename 'address' to 'homeAddress'
expect(resolvedEntity.attributes.length)
.toEqual(7);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('homeAddress');
expect((resolvedEntity.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
expect((resolvedEntity.attributes.allItems[5] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity.attributes.allItems[6] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[6], 'email');
});
/**
* Test AddAttributeGroup operation with a 'referenceOnly' and 'virtual' condition
*/
it('testConditionalProj', async () => {
const testName: string = 'testConditionalProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
// for (const resOpt of resOptsCombinations) {
// await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
// }
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['referenceOnly']);
//// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
//// Condition not met, don't include supporting attribute
expect(resolvedEntity.attributes.length)
.toEqual(5);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
const resolvedEntity2: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['referenceOnly', 'virtual'])
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
// Condition met, include the supporting attribute
expect(resolvedEntity2.attributes.length)
.toEqual(6);
expect((resolvedEntity2.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity2.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity2.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity2.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity2.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
expect((resolvedEntity2.attributes.allItems[5] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity2.attributes.allItems[5], 'email');
});
/**
* Test resolving an entity attribute using resolution guidance
*/
it('testEntityAttribute', async () => {
const testName: string = 'testEntityAttribute';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
let resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['referenceOnly']);
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
expect(resolvedEntity.attributes.length)
.toEqual(2);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('id');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[1], 'id', false);
// Resolve without directives
resolvedEntity = await projectionTestUtils.getResolvedEntity(corpus, entity, []);
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
expect(resolvedEntity.attributes.length)
.toEqual(6);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
expect((resolvedEntity.attributes.allItems[5] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[5], 'email', false);
});
/**
* Test resolving an entity attribute with add supporint attribute operation
*/
it('testEntityAttributeProj', async () => {
const testName: string = 'testEntityAttributeProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['referenceOnly']);
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
expect(resolvedEntity.attributes.length)
.toEqual(6);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
expect((resolvedEntity.attributes.allItems[5] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[5], 'email');
});
/**
* addSupportingAttribute on an entity definition using resolution guidance
*/
it('testExtendsEntity', async () => {
const testName: string = 'testExtendsEntity';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, []);
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
// Supporting attribute: 'PersonInfo_display' (using extendsEntityResolutionGuidance)
expect(resolvedEntity.attributes.length)
.toEqual(6);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
expect((resolvedEntity.attributes.allItems[5] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[5], 'email', false);
});
/**
* addSupportingAttribute on an entity definition
*/
it('testExtendsEntityProj', async () => {
const testName: string = 'testExtendsEntityProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, []);
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
// Supporting attribute: 'PersonInfo_display' (using extendsEntityResolutionGuidance)
expect(resolvedEntity.attributes.length)
.toEqual(6);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('age');
expect((resolvedEntity.attributes.allItems[2] as CdmTypeAttributeDefinition).name)
.toEqual('address');
expect((resolvedEntity.attributes.allItems[3] as CdmTypeAttributeDefinition).name)
.toEqual('phoneNumber');
expect((resolvedEntity.attributes.allItems[4] as CdmTypeAttributeDefinition).name)
.toEqual('email');
expect((resolvedEntity.attributes.allItems[5] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[5], 'email');
});
/**
* Nested replaceAsForeignKey with addSupporingAttribute
*/
it('testNestedProj', async () => {
const testName: string = 'testNestedProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['referenceOnly']);
// Original set of attributes: ['name', 'age', 'address', 'phoneNumber', 'email']
expect(resolvedEntity.attributes.length)
.toEqual(2);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('personId');
expect((resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(resolvedEntity.attributes.allItems[1], 'personId');
});
/**
* Test resolving a type attribute with a nested add supporting attribute operation
*/
it('TestNestedTypeAttributeProj', async () => {
const testName: string = 'testNestedTAProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['referenceOnly']);
// Original set of attributes: ["PersonInfo"]
expect(resolvedEntity.attributes.length)
.toEqual(2);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('name');
const supportingAttribute: CdmTypeAttributeDefinition = resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition;
expect(supportingAttribute.name)
.toEqual('name_display');
validateInSupportOfAttribute(supportingAttribute, 'name', false);
});
/**
* Test resolving a type attribute using resolution guidance
*/
it('testTypeAttribute', async () => {
const testName: string = 'testTypeAttribute';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
// for (const resOpt of resOptsCombinations) {
// await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
// }
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['structured']);
// Original set of attributes: ["PersonInfo"]
expect(resolvedEntity.attributes.length)
.toEqual(2);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo');
const supportingAttribute: CdmTypeAttributeDefinition = resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition;
expect(supportingAttribute.name)
.toEqual('PersonInfo_display');
validateInSupportOfAttribute(supportingAttribute, 'PersonInfo', false);
});
/**
 * Test resolving a type attribute with a nested add supporting attribute operation
 */
it('testTypeAttributeProj', async () => {
const testName: string = 'testTypeAttributeProj';
const entityName: string = 'NewPerson';
const corpus: CdmCorpusDefinition = testHelper.getLocalCorpus(testsSubpath, testName);
// Save resolved output for every resolution-option combination (regression files).
for (const resOpt of resOptsCombinations) {
await projectionTestUtils.loadEntityForResolutionOptionAndSave(corpus, testName, testsSubpath, entityName, resOpt);
}
const entity: CdmEntityDefinition = await corpus.fetchObjectAsync<CdmEntityDefinition>(`local:/${entityName}.cdm.json/${entityName}`);
// Resolve with the 'structured' directive only.
const resolvedEntity: CdmEntityDefinition = await projectionTestUtils.getResolvedEntity(corpus, entity, ['structured']);
// Original set of attributes: ["PersonInfo"]
// The addSupportingAttribute projection adds one attribute alongside the original.
expect(resolvedEntity.attributes.length)
.toEqual(2);
expect((resolvedEntity.attributes.allItems[0] as CdmTypeAttributeDefinition).name)
.toEqual('PersonInfo');
const supportingAttribute: CdmTypeAttributeDefinition = resolvedEntity.attributes.allItems[1] as CdmTypeAttributeDefinition;
expect(supportingAttribute.name)
.toEqual('PersonInfo_display');
// `false`: this projection path is not expected to carry the virtual-attribute trait.
validateInSupportOfAttribute(supportingAttribute, 'PersonInfo', false);
});
/**
 * Validates that the supporting attribute has the 'is.addedInSupportOf' trait pointing
 * back at the attribute it supports, and optionally the 'is.virtual.attribute' trait.
 * @param supportingAttribute The generated supporting attribute to inspect.
 * @param fromAttribute Expected value of the 'is.addedInSupportOf' trait argument
 *                      (the name of the attribute being supported).
 * @param checkVirtualTrait When true (default), also require 'is.virtual.attribute'.
 */
function validateInSupportOfAttribute(supportingAttribute: CdmAttributeItem, fromAttribute: string, checkVirtualTrait: boolean = true): void {
const inSupportOfTrait: CdmTraitReference = supportingAttribute.appliedTraits.item('is.addedInSupportOf') as CdmTraitReference;
expect(inSupportOfTrait)
.not
.toBeUndefined();
// The trait carries exactly one argument: the supported attribute's name.
expect(inSupportOfTrait.arguments.length)
.toEqual(1);
expect(inSupportOfTrait.arguments.allItems[0].value)
.toEqual(fromAttribute);
if (checkVirtualTrait) {
expect(supportingAttribute.appliedTraits.item('is.virtual.attribute'))
.not
.toBeUndefined();
}
}
}); | the_stack |
import { ClassificationGuideBody } from '../model/classificationGuideBody';
import { ClassificationGuideEntry } from '../model/classificationGuideEntry';
import { ClassificationGuidePaging } from '../model/classificationGuidePaging';
import { InstructionEntry } from '../model/instructionEntry';
import { SubtopicPaging } from '../model/subtopicPaging';
import { TopicBody } from '../model/topicBody';
import { TopicEntry } from '../model/topicEntry';
import { TopicPaging } from '../model/topicPaging';
import { BaseApi } from './base.api';
import { buildCollectionParam } from '../../../alfrescoApiClient';
import { throwIfNotDefined } from '../../../assert';
/**
* Classificationguides service.
* @module ClassificationGuidesApi
*/
export class ClassificationGuidesApi extends BaseApi {
/**
* Combined instructions
*
* Combines instructions from the given topics and the user defined instruction, if any.
*
* @param opts Optional parameters
* @param opts.instructions Instructions
* @return Promise<InstructionEntry>
*/
combinedInstructions(opts?: any): Promise<InstructionEntry> {
opts = opts || {};
let postBody = opts['instructions'];
let pathParams = {
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/combined-instructions', 'POST',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, InstructionEntry);
}
/**
* Create a classification guide
*
* Creates a new classification guide.
*
* @param classificationGuide Classification guide
* @return Promise<ClassificationGuideEntry>
*/
createClassificationGuide(classificationGuide: ClassificationGuideBody): Promise<ClassificationGuideEntry> {
throwIfNotDefined(classificationGuide, 'classificationGuide');
let postBody = classificationGuide;
let pathParams = {
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides', 'POST',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, ClassificationGuideEntry);
}
/**
* Create a subtopic
*
* Creates a new subtopic of a topic.
*
* @param topicId The identifier for the topic
* @param topic Subtopic
* @param opts Optional parameters
* @param opts.include Returns additional information about the topic. The following optional fields can be requested:
* hasSubtopics - A flag indicating whether the topic already contains any subtopics.
* instruction - Contains details of any instruction in the topic.
* path - An ordered list of id-name pairs of all ancestor topics and the classification guide.
* classificationGuide - The classification guide this topic is in.
* @return Promise<TopicEntry>
*/
createSubtopic(topicId: string, topic: TopicBody, opts?: any): Promise<TopicEntry> {
throwIfNotDefined(topicId, 'topicId');
throwIfNotDefined(topic, 'topic');
opts = opts || {};
let postBody = topic;
let pathParams = {
'topicId': topicId
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv')
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/topics/{topicId}/subtopics', 'POST',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, TopicEntry);
}
/**
* Create a topic
*
* Creates a new topic.
*
* @param classificationGuideId The identifier for the classification guide
* @param topic Topic
* @param opts Optional parameters
* @param opts.include Returns additional information about the topic. The following optional fields can be requested:
* hasSubtopics - A flag indicating whether the topic already contains any subtopics.
* instruction - Contains details of any instruction in the topic.
* path - An ordered list of id-name pairs of all ancestor topics and the classification guide.
* classificationGuide - The classification guide this topic is in.
* @return Promise<TopicEntry>
*/
createTopic(classificationGuideId: string, topic: TopicBody, opts?: any): Promise<TopicEntry> {
throwIfNotDefined(classificationGuideId, 'classificationGuideId');
throwIfNotDefined(topic, 'topic');
opts = opts || {};
let postBody = topic;
let pathParams = {
'classificationGuideId': classificationGuideId
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv')
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides/{classificationGuideId}/topics', 'POST',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, TopicEntry);
}
/**
* Delete a classification guide
*
* Deletes the classification guide with id **classificationGuideId**, including any topics and instructions.
*
* @param classificationGuideId The identifier for the classification guide
* @return Promise<{}>
*/
deleteClassificationGuide(classificationGuideId: string): Promise<any> {
throwIfNotDefined(classificationGuideId, 'classificationGuideId');
let postBody = null;
let pathParams = {
'classificationGuideId': classificationGuideId
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides/{classificationGuideId}', 'DELETE',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts);
}
/**
* Delete a topic
*
* Deletes the topic with id **topicId**, including any subtopics and instructions.
*
* @param topicId The identifier for the topic
* @return Promise<{}>
*/
deleteTopic(topicId: string): Promise<any> {
throwIfNotDefined(topicId, 'topicId');
let postBody = null;
let pathParams = {
'topicId': topicId
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/topics/{topicId}', 'DELETE',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts);
}
/**
* List all classification guides
*
* Gets all classification guides.
*
* @param opts Optional parameters
* @param opts.include Returns additional information about the guide. The following optional fields can be requested:
* hasTopics - A flag indicating whether the guide already contains any topics.
* @param opts.skipCount The number of entities that exist in the collection before those included in this list.
* @param opts.maxItems The maximum number of items to return in the list.
* @param opts.orderBy A string to control the order of the entities returned in a list. You can use the **orderBy** parameter to
sort the list by one or more fields.
Each field has a default sort order, which is normally ascending order. Read the API method implementation notes
above to check if any fields used in this method have a descending default search order.
To sort the entities in a specific order, you can use the **ASC** and **DESC** keywords for any field.
* @param opts.where A string to restrict the returned objects by using a predicate. Supported operations are AND, NOT, and OR. Fields to filter on:
* enabled - e.g. (enabled = true OR enabled = false)
* @return Promise<ClassificationGuidePaging>
*/
listClassificationGuides(opts?: any): Promise<ClassificationGuidePaging> {
opts = opts || {};
let postBody = null;
let pathParams = {
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv'),
'skipCount': opts['skipCount'],
'maxItems': opts['maxItems'],
'orderBy': buildCollectionParam(opts['orderBy'], 'csv'),
'where': opts['where']
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, ClassificationGuidePaging);
}
/**
* List all subtopics
*
* Gets all subtopics of a topic.
*
* @param topicId The identifier for the topic
* @param opts Optional parameters
* @param opts.include Returns additional information about the topic. The following optional fields can be requested:
* hasSubtopics - A flag indicating whether the topic already contains any subtopics.
* instruction - Contains details of any instruction in the topic.
* path - An ordered list of id-name pairs of all ancestor topics and the classification guide.
* classificationGuide - The classification guide this topic is in.
* @param opts.skipCount The number of entities that exist in the collection before those included in this list.
* @param opts.maxItems The maximum number of items to return in the list.
* @param opts.orderBy A string to control the order of the entities returned in a list. You can use the **orderBy** parameter to
sort the list by one or more fields.
Each field has a default sort order, which is normally ascending order. Read the API method implementation notes
above to check if any fields used in this method have a descending default search order.
To sort the entities in a specific order, you can use the **ASC** and **DESC** keywords for any field.
* @param opts.where A string to restrict the returned objects by using a predicate. Supported operations are AND, NOT, and OR. Fields to filter on:
* hasInstruction
* hasSubtopics
* @param opts.includeSource Also include **source** in addition to **entries** with folder information on the parent guide/topic
* @return Promise<SubtopicPaging>
*/
listSubtopics(topicId: string, opts?: any): Promise<SubtopicPaging> {
throwIfNotDefined(topicId, 'topicId');
opts = opts || {};
let postBody = null;
let pathParams = {
'topicId': topicId
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv'),
'skipCount': opts['skipCount'],
'maxItems': opts['maxItems'],
'orderBy': buildCollectionParam(opts['orderBy'], 'csv'),
'where': opts['where'],
'includeSource': opts['includeSource']
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/topics/{topicId}/subtopics', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, SubtopicPaging);
}
/**
* List all topics
*
* Gets all topics.
*
* @param classificationGuideId The identifier for the classification guide
* @param opts Optional parameters
* @param opts.include Returns additional information about the topic. The following optional fields can be requested:
* hasSubtopics - A flag indicating whether the topic already contains any subtopics.
* instruction - Contains details of any instruction in the topic.
* path - An ordered list of id-name pairs of all ancestor topics and the classification guide.
* classificationGuide - The classification guide this topic is in.
* @param opts.skipCount The number of entities that exist in the collection before those included in this list.
* @param opts.maxItems The maximum number of items to return in the list.
* @param opts.orderBy A string to control the order of the entities returned in a list. You can use the **orderBy** parameter to
sort the list by one or more fields.
Each field has a default sort order, which is normally ascending order. Read the API method implementation notes
above to check if any fields used in this method have a descending default search order.
To sort the entities in a specific order, you can use the **ASC** and **DESC** keywords for any field.
* @param opts.where A string to restrict the returned objects by using a predicate. Supported operations are AND, NOT, and OR e.g. (instruction=true and hasSubtopics=false). Fields to filter on:
* hasInstruction
* hasSubtopics
* @param opts.includeSource Also include **source** in addition to **entries** with folder information on the parent guide/topic
* @return Promise<TopicPaging>
*/
listTopics(classificationGuideId: string, opts?: any): Promise<TopicPaging> {
throwIfNotDefined(classificationGuideId, 'classificationGuideId');
opts = opts || {};
let postBody = null;
let pathParams = {
'classificationGuideId': classificationGuideId
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv'),
'skipCount': opts['skipCount'],
'maxItems': opts['maxItems'],
'orderBy': buildCollectionParam(opts['orderBy'], 'csv'),
'where': opts['where'],
'includeSource': opts['includeSource']
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides/{classificationGuideId}/topics', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, TopicPaging);
}
/**
* Get classification guide information
*
* Gets the classification guide with id **classificationGuideId**.
*
* @param classificationGuideId The identifier for the classification guide
* @return Promise<ClassificationGuideEntry>
*/
showClassificationGuideById(classificationGuideId: string): Promise<ClassificationGuideEntry> {
throwIfNotDefined(classificationGuideId, 'classificationGuideId');
let postBody = null;
let pathParams = {
'classificationGuideId': classificationGuideId
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides/{classificationGuideId}', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, ClassificationGuideEntry);
}
/**
* Get topic information
*
* Gets the topic with id **topicId**.
*
* @param topicId The identifier for the topic
* @param opts Optional parameters
* @param opts.include Returns additional information about the topic. The following optional fields can be requested:
* hasSubtopics - A flag indicating whether the topic already contains any subtopics.
* instruction - Contains details of any instruction in the topic.
* path - An ordered list of id-name pairs of all ancestor topics and the classification guide.
* classificationGuide - The classification guide this topic is in.
* @return Promise<TopicEntry>
*/
showTopicById(topicId: string, opts?: any): Promise<TopicEntry> {
throwIfNotDefined(topicId, 'topicId');
opts = opts || {};
let postBody = null;
let pathParams = {
'topicId': topicId
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv')
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/topics/{topicId}', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, TopicEntry);
}
/**
* Update a classification guide
*
* Updates the classification guide with id **classificationGuideId**. For example, you can rename a classification guide.
*
* @param classificationGuideId The identifier for the classification guide
* @param classificationGuide Classification guide
* @return Promise<ClassificationGuideEntry>
*/
updateClassificationGuide(classificationGuideId: string, classificationGuide: ClassificationGuideBody): Promise<ClassificationGuideEntry> {
throwIfNotDefined(classificationGuideId, 'classificationGuideId');
throwIfNotDefined(classificationGuide, 'classificationGuide');
let postBody = classificationGuide;
let pathParams = {
'classificationGuideId': classificationGuideId
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/classification-guides/{classificationGuideId}', 'PUT',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, ClassificationGuideEntry);
}
/**
* Update a topic
*
* Updates the topic with id **topicId**.
Use this to rename a topic or to add, edit, or remove the instruction associated with it.
*
* @param topicId The identifier for the topic
* @param topic Topic
* @param opts Optional parameters
* @param opts.include Returns additional information about the topic. The following optional fields can be requested:
* hasSubtopics - A flag indicating whether the topic already contains any subtopics.
* instruction - Contains details of any instruction in the topic.
* path - An ordered list of id-name pairs of all ancestor topics and the classification guide.
* classificationGuide - The classification guide this topic is in.
* @return Promise<TopicEntry>
*/
updateTopic(topicId: string, topic: TopicBody, opts?: any): Promise<TopicEntry> {
throwIfNotDefined(topicId, 'topicId');
throwIfNotDefined(topic, 'topic');
opts = opts || {};
let postBody = topic;
let pathParams = {
'topicId': topicId
};
let queryParams = {
'include': buildCollectionParam(opts['include'], 'csv')
};
let headerParams = {
};
let formParams = {
};
let contentTypes = ['application/json'];
let accepts = ['application/json'];
return this.apiClient.callApi(
'/topics/{topicId}', 'PUT',
pathParams, queryParams, headerParams, formParams, postBody,
contentTypes, accepts, TopicEntry);
}
} | the_stack |
import { strict as assert } from "assert";
import { TelemetryNullLogger } from "@fluidframework/common-utils";
import {
channelsTreeName,
CreateChildSummarizerNodeParam,
CreateSummarizerNodeSource,
ISummarizerNode,
ISummarizerNodeConfig,
} from "@fluidframework/runtime-definitions";
import { ISequencedDocumentMessage, ISnapshotTree, SummaryType } from "@fluidframework/protocol-definitions";
import {
createRootSummarizerNode,
IRootSummarizerNode,
} from "../summarizerNode";
import { mergeStats } from "../summaryUtils";
// eslint-disable-next-line import/no-internal-modules
import { SummarizerNode } from "../summarizerNode/summarizerNode";
describe("Runtime", () => {
describe("Summarization", () => {
describe("Summarizer Node", () => {
// Fixed three-level node hierarchy used by every test: root -> mid -> leaf.
const names = ["root", "mid", "leaf"] as const;
const ids = ["rootId", "midId", "leafId"] as const;
let rootNode: IRootSummarizerNode;
let midNode: ISummarizerNode | undefined;
let leafNode: ISummarizerNode | undefined;
const logger = new TelemetryNullLogger();
// Per-node summarize invocation counters, indexed by depth (0=root, 1=mid, 2=leaf);
// reset in beforeEach below.
let summarizeCalls = [0, 0, 0];
/** Asserts the summarize call counts recorded per node depth match `expected`. */
function assertSummarizeCalls(...expected: [root: number, mid: number, leaf: number]) {
for (let i = 0; i < expected.length; i++) {
assert(expected[i] === summarizeCalls[i],
`unexpected ${names[i]} summarize call count: ${expected[i]} !== ${summarizeCalls[i]}`);
}
}
// Builds the internal summarize callback for the node at `depth`; it bumps the
// call counter for that depth and returns a minimal empty summary tree.
// `fullTree` is accepted for signature compatibility but intentionally unused.
const getSummarizeInternalFn = (depth: 0 | 1 | 2) => async (fullTree: boolean) => {
summarizeCalls[depth]++;
return {
id: ids[depth],
pathPartsForChildren: undefined, // extra path parts between nodes
stats: mergeStats(),
summary: { type: SummaryType.Tree, tree: {} } as const,
};
};
// (Re)creates the root summarizer node stored in `rootNode`.
// `changeSeq` defaults to 1; `refSeq` and any remaining ISummarizerNodeConfig
// fields are forwarded as-is.
function createRoot({
changeSeq = 1,
refSeq,
...config
}: Partial<ISummarizerNodeConfig & {
changeSeq: number;
refSeq: number;
}> = {}) {
rootNode = createRootSummarizerNode(
logger,
getSummarizeInternalFn(0),
changeSeq,
refSeq,
config,
);
}
// Creates the mid-level node as a child of `rootNode` (root must exist first).
function createMid(
createParam: CreateChildSummarizerNodeParam,
config?: ISummarizerNodeConfig,
) {
midNode = rootNode.createChild(
getSummarizeInternalFn(1),
ids[1],
createParam,
config,
);
}
// Creates the leaf node as a child of `midNode`; no-op (leaves `leafNode`
// undefined) when the mid node has not been created.
function createLeaf(
createParam: CreateChildSummarizerNodeParam,
config?: ISummarizerNodeConfig,
) {
leafNode = midNode?.createChild(
getSummarizeInternalFn(2),
ids[2],
createParam,
config,
);
}
/**
 * Invokes `fn` and asserts that it throws an error whose message is one of
 * `expectedErrors`. If `fn` completes without throwing, a synthetic
 * "<failMsg>: Expected to fail" error is raised and checked against
 * `expectedErrors` instead, so the assertion fails with `errMsg`.
 */
function expectThrow(
fn: () => unknown,
failMsg: string,
errMsg: string,
...expectedErrors: string[]
): void {
try {
fn();
throw new Error(`${failMsg}: Expected to fail`);
} catch (error) {
const matched = expectedErrors.includes(error.message);
assert(matched, errMsg);
}
}
/**
 * Awaits `fn` and asserts that it rejects with an error whose message is one
 * of `expectedErrors`. If the promise resolves, a synthetic
 * "<failMsg>: Expected to reject" error is raised and checked against
 * `expectedErrors` instead, so the assertion fails with `errMsg`.
 */
async function expectReject(
fn: () => Promise<unknown>,
failMsg: string,
errMsg: string,
...expectedErrors: string[]
): Promise<void> {
try {
await fn();
throw new Error(`${failMsg}: Expected to reject`);
} catch (error) {
const matched = expectedErrors.includes(error.message);
assert(matched, errMsg);
}
}
// Shared snapshot fixtures. `blobs` backs the fake readAndParseBlob used when
// refreshing from a snapshot tree.
const summaryRefSeq = 123;
const blobs = {
protocolAttributes: { sequenceNumber: summaryRefSeq },
} as const;
// Fake blob reader: resolves a blob id to the fixture above.
const readAndParseBlob = async <T>(id: string) => blobs[id] as T;
// Minimal fake op carrying only a sequence number.
const fakeOp = (sequenceNumber: number): ISequencedDocumentMessage =>
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
({ sequenceNumber } as ISequencedDocumentMessage);
const emptySnapshot: ISnapshotTree = { blobs: {}, commits: {}, trees: {} };
const protocolTree: ISnapshotTree = { blobs: { attributes: "protocolAttributes" }, commits: {}, trees: {} };
// Core tree mirrors the node hierarchy: mid subtree containing the leaf subtree.
const coreSnapshot: ISnapshotTree = { blobs: {}, commits: {}, trees: {
[ids[1]]: { blobs: {}, commits: {}, trees: {
[ids[2]]: emptySnapshot,
} },
} };
// Snapshot without a channels wrapper (legacy layout).
const simpleSnapshot: ISnapshotTree = { blobs: {}, commits: {}, trees: {
...coreSnapshot.trees,
".protocol": protocolTree,
} };
// Snapshot with the core tree nested under the channels subtree.
const channelsSnapshot: ISnapshotTree = { blobs: {}, commits: {}, trees: {
[channelsTreeName]: coreSnapshot,
".protocol": protocolTree,
} };
const getSnapshot = async () => simpleSnapshot;
// Reset per-depth summarize counters before every test.
beforeEach(() => {
summarizeCalls = [0, 0, 0];
});
// Child creation preconditions. Hex strings like "0x1ac" are the assert
// shortcodes thrown by the implementation for each failure mode — presumably
// stable identifiers; confirm against the summarizerNode source if they change.
describe("Create Child", () => {
it("Should fail to create child from summary if parent does not have summary", () => {
createRoot();
expectThrow(
() => createMid({ type: CreateSummarizerNodeSource.FromSummary }),
"create child",
"no parent summary",
"0x1ac",
);
assert(midNode === undefined, "should not be created");
});
it("Should fail to create child with same id", () => {
createRoot();
createMid({ type: CreateSummarizerNodeSource.Local });
expectThrow(
() => createMid({ type: CreateSummarizerNodeSource.Local }),
"create child",
"child node with same id already exists",
"0x1ab",
);
});
});
// Loading a base summary: a plain snapshot yields no extra child path, while a
// channels-wrapped snapshot must record the channels subtree name as the
// additional path for children.
describe("Load Base Summary", () => {
it("Load base summary without differential should do nothing for simple snapshot", async () => {
createRoot({ refSeq: 1 });
rootNode.loadBaseSummaryWithoutDifferential(simpleSnapshot);
const latestSummary = (rootNode as SummarizerNode).latestSummary;
assert(latestSummary !== undefined, "latest summary should exist");
assert.strictEqual(latestSummary.additionalPath?.path, undefined,
"should not have any path parts for children");
});
it("Load base summary without differential should strip channels subtree", async () => {
createRoot({ refSeq: 1 });
rootNode.loadBaseSummaryWithoutDifferential(channelsSnapshot);
const latestSummary = (rootNode as SummarizerNode).latestSummary;
assert(latestSummary !== undefined, "latest summary should exist");
assert.strictEqual(latestSummary.additionalPath?.path, channelsTreeName,
"should have channels path for children");
});
it("Load base summary should do nothing for simple snapshot", async () => {
createRoot({ refSeq: 1 });
const { baseSummary, outstandingOps } = await rootNode.loadBaseSummary(
simpleSnapshot, readAndParseBlob);
assert.strictEqual(outstandingOps.length, 0, "no outstanding ops");
// 2 subtrees: the mid node's subtree plus ".protocol".
assert.strictEqual(Object.keys(baseSummary.trees).length, 2, "only 2 subtrees");
assert(baseSummary.trees[ids[1]] !== undefined, "mid subtree");
const latestSummary = (rootNode as SummarizerNode).latestSummary;
assert(latestSummary !== undefined, "latest summary should exist");
assert.strictEqual(latestSummary.additionalPath?.path, undefined,
"should not have any path parts for children");
});
it("Load base summary should strip channels subtree", async () => {
createRoot({ refSeq: 1 });
const { baseSummary, outstandingOps } = await rootNode.loadBaseSummary(
channelsSnapshot, readAndParseBlob);
assert.strictEqual(outstandingOps.length, 0, "no outstanding ops");
// 2 subtrees: the channels subtree plus ".protocol".
assert.strictEqual(Object.keys(baseSummary.trees).length, 2, "only 2 subtrees");
assert(baseSummary.trees[channelsTreeName] !== undefined, "channels subtree");
const latestSummary = (rootNode as SummarizerNode).latestSummary;
assert(latestSummary !== undefined, "latest summary should exist");
assert.strictEqual(latestSummary.additionalPath?.path, channelsTreeName,
"should have channels path for children");
});
});
// startSummary lifecycle: a new attempt is only legal after the previous one
// was completed (completeSummary) or abandoned (clearSummary).
describe("Start Summary", () => {
it("Should fail startSummary if previous attempt is not completed/cleared", () => {
createRoot();
rootNode.startSummary(11, logger);
expectThrow(
() => rootNode.startSummary(12, logger),
"start summary",
"wip referenceSequenceNumber and logger are still set",
"0x19f", "0x1a0",
);
});
it("Should succeed startSummary if previous attempt is completed", async () => {
createRoot();
rootNode.startSummary(11, logger);
await rootNode.summarize(false);
rootNode.completeSummary("test-handle");
rootNode.startSummary(12, logger);
});
it("Should succeed startSummary if previous attempt is cleared", () => {
createRoot();
rootNode.startSummary(11, logger);
rootNode.clearSummary();
rootNode.startSummary(12, logger);
});
});
// completeSummary requires that summarize() was called on the node and every
// node in its subtree during the current attempt (tracked local paths).
describe("Complete Summary", () => {
it("Should fail completeSummary if summarize not called", () => {
createRoot();
rootNode.startSummary(11, logger);
expectThrow(
() => rootNode.completeSummary("test-handle"),
"complete summary",
"tracked local paths not set",
"0x1a5",
);
});
it("Should fail completeSummary if summarize not called on child node", async () => {
createRoot();
createMid({ type: CreateSummarizerNodeSource.Local });
createLeaf({ type: CreateSummarizerNodeSource.Local });
rootNode.startSummary(11, logger);
await rootNode.summarize(false);
// leaf is summarized but mid is skipped, so completion must still fail.
await leafNode?.summarize(false);
expectThrow(
() => rootNode.completeSummary("test-handle"),
"complete summary",
"tracked local paths not set",
"0x1a5",
);
});
});
// summarize() behavior: calls the internal summarize fn only when there are
// changes newer than the reference sequence number (or fullTree is forced);
// otherwise it reuses the previous summary via a handle.
describe("Summarize", () => {
it("Should fail summarize if startSummary is not called", async () => {
createRoot();
await expectReject(
async () => rootNode.summarize(false),
"summarize",
"no wip referenceSequenceNumber or logger",
"0x1a1", "0x1a2",
);
assertSummarizeCalls(0, 0, 0);
});
it("Should call summarize internal with later op", async () => {
createRoot({ refSeq: 11 });
// op at seq 12 > refSeq 11 => node is dirty, real summary produced.
rootNode.recordChange(fakeOp(12));
rootNode.startSummary(99, logger);
const result = await rootNode.summarize(false);
assertSummarizeCalls(1, 0, 0);
assert(result.summary.type === SummaryType.Tree, "should be tree");
});
it("Should call summarize internal with later invalidate", async () => {
createRoot({ refSeq: 11 });
rootNode.invalidate(12);
rootNode.startSummary(99, logger);
const result = await rootNode.summarize(false);
assertSummarizeCalls(1, 0, 0);
assert(result.summary.type === SummaryType.Tree, "should be tree");
});
it("Should not call summarize internal and instead use handle", async () => {
createRoot({ refSeq: 11 });
// op at seq 11 == refSeq => no newer change, handle reuse expected.
rootNode.recordChange(fakeOp(11));
rootNode.startSummary(99, logger);
const result = await rootNode.summarize(false);
assertSummarizeCalls(0, 0, 0);
assert(result.summary.type === SummaryType.Handle, "should be handle");
});
it("Should call summarize internal always when fullTree true", async () => {
createRoot({ refSeq: 11 });
rootNode.recordChange(fakeOp(10));
rootNode.startSummary(99, logger);
// fullTree=true forces a real summary even with no newer changes.
const result = await rootNode.summarize(true);
assertSummarizeCalls(1, 0, 0);
assert(result.summary.type === SummaryType.Tree, "should be tree");
});
});
// refreshLatestSummary: refreshes from a pending tracked summary when the
// proposal handle matches one; otherwise falls back to fetching the snapshot
// tree, unless the node already has a newer/equal reference sequence number.
describe("Refresh Latest Summary", () => {
it("Should refresh from tree when no proposal handle provided", async () => {
createRoot();
const result = await rootNode.refreshLatestSummary(
undefined,
summaryRefSeq,
getSnapshot,
readAndParseBlob,
logger,
);
assert(result.latestSummaryUpdated === true, "should update");
assert(result.wasSummaryTracked === false, "should not be tracked");
assert(result.snapshot !== undefined, "should have tree result");
});
it("Should refresh from tree when proposal handle not pending", async () => {
createRoot();
const result = await rootNode.refreshLatestSummary(
"test-handle",
summaryRefSeq,
getSnapshot,
readAndParseBlob,
logger,
);
assert(result.latestSummaryUpdated === true, "should update");
assert(result.wasSummaryTracked === false, "should not be tracked");
assert(result.snapshot !== undefined, "should have tree result");
});
it("Should not refresh latest if already passed ref seq number", async () => {
createRoot({ refSeq: summaryRefSeq });
const result = await rootNode.refreshLatestSummary(
undefined,
summaryRefSeq,
getSnapshot,
readAndParseBlob,
logger,
);
assert(result.latestSummaryUpdated === false, "we already got this summary");
});
it("Should refresh from pending", async () => {
createRoot();
const proposalHandle = "test-handle";
// Complete a summary attempt under this handle so it is pending/tracked.
rootNode.startSummary(10, logger);
await rootNode.summarize(false);
rootNode.completeSummary(proposalHandle);
const result = await rootNode.refreshLatestSummary(
proposalHandle,
summaryRefSeq,
getSnapshot,
readAndParseBlob,
logger,
);
assert(result.latestSummaryUpdated === true, "should update");
assert(result.wasSummaryTracked === true, "should be tracked");
});
});
});
});
});
import { asyncOperation, ClassType, CompilerContext, getClassName, isClass, isObject, urlJoin } from '@deepkit/core';
import {
assertType,
entity,
findMember,
getSerializeFunction,
getValidatorFunction,
metaAnnotation,
ReflectionClass,
ReflectionFunction,
ReflectionKind,
ReflectionParameter,
SerializationOptions,
serializer,
Serializer,
stringifyType,
Type,
typeToObject,
ValidationError
} from '@deepkit/type';
// @ts-ignore
import formidable from 'formidable';
import querystring from 'querystring';
import { HttpAction, httpClass, HttpController, HttpDecorator } from './decorator';
import { BodyValidationError, getRegExp, HttpRequest, HttpRequestQuery, HttpRequestResolvedParameters, ValidatedBody } from './model';
import { InjectorContext, InjectorModule, TagRegistry } from '@deepkit/injector';
import { Logger, LoggerInterface } from '@deepkit/logger';
import { HttpControllers } from './controllers';
import { MiddlewareRegistry, MiddlewareRegistryEntry } from '@deepkit/app';
import { HttpMiddlewareConfig, HttpMiddlewareFn } from './middleware';
//@ts-ignore
import qs from 'qs';
import { isArray } from '@deepkit/core';
/** Resolves the concrete argument list for a route action from the request's injector context. */
export type RouteParameterResolverForInjector = ((injector: InjectorContext) => any[] | Promise<any[]>);
/** Result of matching a request to a route: everything needed to invoke the action. */
interface ResolvedController {
// Produces the action's arguments for the given injector context.
parameters: RouteParameterResolverForInjector;
// The matched route's configuration.
routeConfig: RouteConfig;
// Files uploaded with the request, keyed by form field name.
uploadedFiles: { [name: string]: UploadedFile };
// Optional factory for the middleware chain (with per-middleware timeout) to run before the action.
middlewares?: (injector: InjectorContext) => { fn: HttpMiddlewareFn, timeout: number }[];
}
/**
 * Metadata for a single file received via multipart upload.
 * Fields use definite assignment (`!`) — presumably populated by the form
 * parser (formidable) rather than a constructor; confirm against the resolver.
 */
@entity.name('@deepkit/UploadedFile')
export class UploadedFile {
/**
 * The size of the uploaded file in bytes.
 */
size!: number;
/**
 * The path this file is being written to.
 */
path!: string;
/**
 * The name this file had according to the uploading client.
 */
name!: string | null;
/**
 * The mime type of this file, according to the uploading client.
 */
type!: string | null;
/**
 * A Date object (or `null`) containing the time this file was last written to.
 * Mostly here for compatibility with the [W3C File API Draft](http://dev.w3.org/2006/webapi/FileAPI/).
 */
lastModifiedDate!: Date | null;
// /**
//  * If `options.hash` calculation was set, you can read the hex digest out of this var.
//  */
// hash!: string | 'sha1' | 'md5' | 'sha256' | null;
}
/** Route action backed by a plain function (functional route registration). */
export interface RouteFunctionControllerAction {
    type: 'function';
    //if not set, the root module is used
    module?: InjectorModule<any>;
    fn: (...args: any[]) => any;
}

/** Route action backed by a method on a class controller. */
export interface RouteClassControllerAction {
    type: 'controller';
    //if not set, the root module is used
    module?: InjectorModule<any>;
    controller: ClassType;
    methodName: string;
}
/**
 * Fully describes a single registered HTTP route: its path, methods,
 * backing action, serialization settings, and per-route resolvers/middlewares.
 */
export class RouteConfig {
    public baseUrl: string = '';
    public responses: { statusCode: number, description: string, type?: Type }[] = [];
    public description: string = '';
    public groups: string[] = [];
    public category: string = '';
    public returnType?: Type;
    public serializationOptions?: SerializationOptions;
    public serializer?: Serializer;

    /**
     * When assigned defines where this route came from.
     */
    public module?: InjectorModule<any>;

    resolverForToken: Map<any, ClassType> = new Map();
    middlewares: { config: HttpMiddlewareConfig, module?: InjectorModule<any> }[] = [];
    resolverForParameterName: Map<string, ClassType> = new Map();

    /**
     * An arbitrary data container the user can use to store app specific settings/values.
     */
    data = new Map<any, any>();

    constructor(
        public readonly name: string,
        public readonly httpMethods: string[],
        public readonly path: string,
        public readonly action: RouteClassControllerAction | RouteFunctionControllerAction,
        public internal: boolean = false,
    ) {
    }

    /** Reflection of the function/method that implements this route's action. */
    getReflectionFunction(): ReflectionFunction {
        if (this.action.type === 'controller') {
            return ReflectionClass.from(this.action.controller).getMethod(this.action.methodName);
        }
        return ReflectionFunction.from(this.action.fn);
    }

    /** The declared response type for the given status code, if one was registered. */
    getSchemaForResponse(statusCode: number): Type | undefined {
        return this.responses.find((response) => response.statusCode === statusCode)?.type;
    }

    /** Full path including baseUrl, always with a leading slash. */
    getFullPath(): string {
        const joined = this.baseUrl ? urlJoin(this.baseUrl, this.path) : this.path;
        return joined.startsWith('/') ? joined : '/' + joined;
    }
}
/**
 * A route whose path has been compiled to a regular expression,
 * together with the parsed metadata of each action parameter.
 */
class ParsedRoute {
    /** Regex source matching the full path, one capture group per path parameter. */
    public regex?: string;
    /** Maps each path parameter name to its capture-group index. */
    public pathParameterNames: { [name: string]: number } = {};

    protected parameters: ParsedRouteParameter[] = [];

    constructor(public routeConfig: RouteConfig) {
    }

    /** Wraps the reflection parameter and tracks it as part of this route. */
    addParameter(property: ReflectionParameter): ParsedRouteParameter {
        const parsed = new ParsedRouteParameter(property);
        this.parameters.push(parsed);
        return parsed;
    }

    getParameters(): ParsedRouteParameter[] {
        return this.parameters;
    }

    /** Looks up a parameter by name; throws when no such parameter exists. */
    getParameter(name: string): ParsedRouteParameter {
        const found = this.parameters.find((parameter) => parameter.getName() === name);
        if (found) return found;
        throw new Error(`No route parameter with name ${name} defined.`);
    }
}
/**
 * Wraps a single action parameter and exposes how it is bound to the request
 * (path, query, queries, body, or validated body) based on its type annotations.
 */
class ParsedRouteParameter {
    /** Capture-group position in the route regex when this parameter is part of the path. */
    regexPosition?: number;

    constructor(
        public parameter: ReflectionParameter,
    ) {
    }

    /** True when annotated as HttpBody. */
    get body() {
        return undefined !== metaAnnotation.getForName(this.parameter.type, 'httpBody');
    }

    /** True when annotated as HttpBodyValidation. */
    get bodyValidation() {
        return undefined !== metaAnnotation.getForName(this.parameter.type, 'httpBodyValidation');
    }

    /**
     * The effective value type: for body validation wrappers this is the type of
     * their `value` property, otherwise the parameter type itself.
     */
    getType(): Type {
        if (!this.bodyValidation) return this.parameter.type;
        assertType(this.parameter.type, ReflectionKind.class);
        const valueType = findMember('value', this.parameter.type);
        if (!valueType || valueType.kind !== ReflectionKind.property) throw new Error(`No property value found at ${stringifyType(this.parameter.type)}`);
        return valueType.type as Type;
    }

    /** True when annotated as HttpQuery. */
    get query() {
        return undefined !== metaAnnotation.getForName(this.parameter.type, 'httpQuery');
    }

    /** True when annotated as HttpQueries. */
    get queries() {
        return undefined !== metaAnnotation.getForName(this.parameter.type, 'httpQueries');
    }

    /** Custom name from the query annotation's options, if one was given. */
    get typePath(): string | undefined {
        const meta = metaAnnotation.getForName(this.parameter.type, 'httpQueries')
            || metaAnnotation.getForName(this.parameter.type, 'httpQuery');
        if (!meta) return undefined;
        const options = typeToObject(meta[0]);
        return isObject(options) ? options.name : undefined;
    }

    getName() {
        return this.parameter.name;
    }

    isPartOfPath(): boolean {
        return this.regexPosition !== undefined;
    }
}
function parseRoutePathToRegex(routeConfig: RouteConfig): { regex: string, parameterNames: { [name: string]: number } } {
const parameterNames: { [name: string]: number } = {};
let path = routeConfig.getFullPath();
const fn = routeConfig.getReflectionFunction();
let argumentIndex = 0;
path = path.replace(/:(\w+)/g, (a, name) => {
parameterNames[name] = argumentIndex;
argumentIndex++;
const parameter = fn.getParameterOrUndefined(name);
if (parameter) {
const regExp = getRegExp(parameter.type);
if (regExp instanceof RegExp) {
return '(' + regExp.source + ')';
} else if (regExp) {
return '(' + regExp + ')';
}
}
return String.raw`([^/]+)`;
});
return { regex: path, parameterNames };
}
/**
 * Builds the ParsedRoute for a route config: compiles the path to a regex and
 * registers every action parameter, linking path parameters to their
 * capture-group positions.
 */
export function parseRouteControllerAction(routeConfig: RouteConfig): ParsedRoute {
    const route = new ParsedRoute(routeConfig);

    const { regex, parameterNames } = parseRoutePathToRegex(routeConfig);
    route.regex = regex;
    route.pathParameterNames = parameterNames;

    for (const property of routeConfig.getReflectionFunction().getParameters()) {
        route.addParameter(property).regexPosition = parameterNames[property.name];
    }

    return route;
}
/**
 * Converts a dotted property path to URL bracket notation,
 * e.g. `filter.name` becomes `filter[name]`. Paths without dots
 * are returned unchanged.
 */
export function dotToUrlPath(dotPath: string): string {
    const segments = dotPath.split('.');
    if (segments.length === 1) return dotPath;
    return segments[0] + '[' + segments.slice(1).join('][') + ']';
}
/**
 * Implement this interface to resolve a route parameter's value manually.
 * Resolvers are registered per token or per parameter name on the RouteConfig
 * and are invoked while the route's arguments are being assembled.
 */
export interface RouteParameterResolver {
    resolve(context: RouteParameterResolverContext): any | Promise<any>;
}

/** Context handed to RouteParameterResolver.resolve for each request. */
export interface RouteParameterResolverContext {
    // the injection token of the parameter (its class when it is a class type)
    token: ClassType | string | symbol | any;
    route: RouteConfig;
    request: HttpRequest;

    /**
     * The parameter name (variable name).
     */
    name: any;

    /**
     * The raw parameter value from the path, if the parameter is defined in the path (e.g. /user/:name).
     * If not in the path, you have to use `parameters.<name>` instead.
     */
    value: any;

    query: HttpRequestQuery;
    parameters: HttpRequestResolvedParameters;
}
/**
 * Filters all registered middleware configs down to those that apply to the
 * given route, sorted ascending by their configured order.
 *
 * A config applies when every restriction it defines (controllers, modules,
 * route names, categories, groups, HTTP methods, paths) matches the route.
 */
function filterMiddlewaresForRoute(middlewareRawConfigs: MiddlewareRegistryEntry[], routeConfig: RouteConfig, fullPath: string): { config: HttpMiddlewareConfig, module: InjectorModule<any> }[] {
    // global middlewares plus the ones registered directly on the route
    const middlewares = middlewareRawConfigs.slice(0);
    middlewares.push(...routeConfig.middlewares as any[]);

    const middlewareConfigs = middlewares.filter((v) => {
        if (!(v.config instanceof HttpMiddlewareConfig)) return false;

        // controller allow/deny lists only apply to class-controller routes
        if (v.config.controllers.length && routeConfig.action.type === 'controller' && !v.config.controllers.includes(routeConfig.action.controller)) {
            return false;
        }
        if (v.config.excludeControllers.length && routeConfig.action.type === 'controller' && v.config.excludeControllers.includes(routeConfig.action.controller)) {
            return false;
        }

        if (v.config.modules.length) {
            if (!routeConfig.module) return false;
            const routeModule = routeConfig.module;
            // Fixed: the route's module must match ANY of the listed modules (by
            // reference or by id). The previous code rejected the route as soon
            // as a single listed module had a different id, so module lists with
            // more than one entry could never match.
            const matches = v.config.modules.includes(routeModule)
                || v.config.modules.some((m) => m.id === routeModule.id);
            if (!matches) return false;
        }

        // Fixed: `v.module` may be undefined for middlewares registered directly
        // on the route, which previously crashed here; use optional chaining.
        if (v.config.selfModule && v.module?.id !== routeConfig.module?.id) return false;

        if (v.config.routeNames.length) {
            for (const name of v.config.routeNames) {
                if (name.includes('*')) {
                    const regex = new RegExp('^' + name.replace(/\*/g, '.*') + '$');
                    if (!regex.test(routeConfig.name)) return false;
                } else if (name !== routeConfig.name) {
                    return false;
                }
            }
        }

        if (v.config.excludeRouteNames.length) {
            for (const name of v.config.excludeRouteNames) {
                if (name.includes('*')) {
                    const regex = new RegExp('^' + name.replace(/\*/g, '.*') + '$');
                    if (regex.test(routeConfig.name)) return false;
                } else if (name === routeConfig.name) {
                    return false;
                }
            }
        }

        for (const route of v.config.routes) {
            if (route.httpMethod && !routeConfig.httpMethods.includes(route.httpMethod)) return false;
            if (route.category && route.category !== routeConfig.category) return false;
            if (route.excludeCategory && route.excludeCategory === routeConfig.category) return false;
            if (route.group && !routeConfig.groups.includes(route.group)) return false;
            if (route.excludeGroup && routeConfig.groups.includes(route.excludeGroup)) return false;
            if (route.path || route.pathRegExp) {
                // cache the compiled path pattern on the config entry
                if (!route.pathRegExp && route.path) route.pathRegExp = new RegExp('^' + route.path.replace(/\*/g, '.*') + '$');
                if (route.pathRegExp && !route.pathRegExp.test(fullPath)) return false;
            }
        }

        return true;
    }) as { config: HttpMiddlewareConfig, module: any }[];

    middlewareConfigs.sort((a, b) => {
        return a.config.order - b.config.order;
    });

    return middlewareConfigs;
}
/** Options accepted by the functional route registration API (get/post/put/...). */
export interface HttpRouterFunctionOptions {
    path: string;
    name?: string;
    // HTTP methods this route responds to; when empty/unset the route matches any method
    methods?: string[];
    description?: string;
    category?: string;
    groups?: string[];

    /**
     * An arbitrary data container the user can use to store app specific settings/values.
     */
    data?: Record<any, any>;

    baseUrl?: string;
    // factories producing middleware configs applied to this route
    middlewares?: (() => HttpMiddlewareConfig)[];
    serializer?: Serializer;
    serializationOptions?: SerializationOptions;
    // parameter resolvers keyed by injection token
    resolverForToken?: Map<any, ClassType>;
    // parameter resolvers keyed by parameter name
    resolverForParameterName?: Map<string, ClassType>;
    responses?: { statusCode: number, description: string, type?: Type }[];
}
/**
 * Normalizes a path-or-options argument into a full HttpRouterFunctionOptions object.
 *
 * Registrar-level defaults (see `forOptions`) are merged in first so explicitly
 * passed options always win. The given HTTP `methods` are only applied when the
 * options themselves do not specify any.
 */
function convertOptions(methods: string[], pathOrOptions: string | HttpRouterFunctionOptions, defaultOptions: Partial<HttpRouterFunctionOptions>): HttpRouterFunctionOptions {
    const options = 'string' === typeof pathOrOptions ? { path: pathOrOptions } : pathOrOptions;
    // Fixed: defaultOptions was previously accepted but never applied, so
    // defaults configured via forOptions() were silently ignored.
    const merged = { ...defaultOptions, ...options } as HttpRouterFunctionOptions;
    if (merged.methods) return merged;
    return { ...merged, methods };
}
/**
 * Base class providing the functional route registration API
 * (`get`, `post`, `put`, ... and `add` for decorator-built routes).
 * Subclasses decide where the resulting RouteConfig objects are stored.
 */
export abstract class HttpRouterRegistryFunctionRegistrar {
    protected defaultOptions: Partial<HttpRouterFunctionOptions> = {};

    abstract addRoute(routeConfig: RouteConfig): void;

    /**
     * Returns a new registrar object with default options that apply to each registered route through this registrar.
     *
     * ```typescript
     * const registry: HttpRouterRegistry = ...;
     *
     * const secretRegistry = registry.forOptions({groups: ['secret']});
     *
     * secretRegistry.get('/admin/groups', () => {
     * });
     *
     * secretRegistry.get('/admin/users', () => {
     * });
     * ```
     */
    forOptions(options: Partial<HttpRouterFunctionOptions>): HttpRouterRegistryFunctionRegistrar {
        const that = this;
        return new class extends HttpRouterRegistryFunctionRegistrar {
            defaultOptions = options;

            addRoute(routeConfig: RouteConfig) {
                that.addRoute(routeConfig);
            }
        };
    }

    /** Registers a route that matches any HTTP method. */
    public any(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions([], pathOrOptions, this.defaultOptions), callback);
    }

    /**
     * Registers a functional route from an already-built http decorator,
     * extracting its HttpAction metadata.
     */
    public add(decorator: HttpDecorator, callback: (...args: any[]) => any) {
        // apply the decorator to a dummy target to extract the HttpAction it carries
        const data = decorator(Object, '_');
        const action = isArray(data) ? data.find(v => v instanceof HttpAction) : undefined;
        if (!action) throw new Error('No HttpAction available');
        const fn = ReflectionFunction.from(callback);
        const routeConfig = createRouteConfigFromHttpAction({
            type: 'function',
            fn: callback,
        }, action);
        routeConfig.returnType = fn.getReturnType();
        this.addRoute(routeConfig);
    }

    /** Registers a GET route. */
    public get(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['GET'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers a POST route. */
    public post(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['POST'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers a PUT route. */
    public put(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['PUT'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers a PATCH route. */
    public patch(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['PATCH'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers a DELETE route. */
    public delete(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['DELETE'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers an OPTIONS route. */
    public options(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['OPTIONS'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers a TRACE route. */
    public trace(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['TRACE'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Registers a HEAD route. */
    public head(pathOrOptions: string | HttpRouterFunctionOptions, callback: (...args: any[]) => any) {
        this.register(convertOptions(['HEAD'], pathOrOptions, this.defaultOptions), callback);
    }

    /** Builds a RouteConfig from the given options and hands it to addRoute(). */
    private register(options: HttpRouterFunctionOptions, callback: (...args: any[]) => any, module?: InjectorModule<any>) {
        const fn = ReflectionFunction.from(callback);
        const routeConfig = new RouteConfig(options.name || '', options.methods || [], options.path, {
            type: 'function',
            fn: callback,
        });
        routeConfig.module = module;
        if (options.responses) routeConfig.responses = options.responses;
        if (options.description) routeConfig.description = options.description;
        if (options.category) routeConfig.category = options.category;
        if (options.groups) routeConfig.groups = options.groups;
        if (options.data) routeConfig.data = new Map(Object.entries(options.data));
        if (options.baseUrl) routeConfig.baseUrl = options.baseUrl;
        if (options.middlewares) {
            routeConfig.middlewares = options.middlewares.map(v => {
                return { config: v(), module };
            });
        }
        // Fixed: this loop was accidentally duplicated, applying the same
        // resolverForToken entries twice.
        if (options.resolverForToken) {
            for (const item of options.resolverForToken) routeConfig.resolverForToken.set(...item);
        }
        if (options.resolverForParameterName) {
            for (const item of options.resolverForParameterName) routeConfig.resolverForParameterName.set(...item);
        }
        routeConfig.serializer = options.serializer;
        routeConfig.serializationOptions = options.serializationOptions;
        routeConfig.returnType = fn.getReturnType();
        this.addRoute(routeConfig);
    }
}
function createRouteConfigFromHttpAction(routeAction: RouteClassControllerAction | RouteFunctionControllerAction, action: HttpAction, module?: InjectorModule<any>, controller?: HttpController) {
const routeConfig = new RouteConfig(action.name, action.httpMethods, action.path, routeAction);
routeConfig.responses = action.responses;
routeConfig.description = action.description;
routeConfig.category = action.category;
routeConfig.groups = action.groups;
routeConfig.data = new Map(action.data);
if (controller) {
routeConfig.baseUrl = controller.baseUrl;
routeConfig.middlewares = controller.middlewares.map(v => {
return { config: v(), module };
});
routeConfig.resolverForToken = new Map(controller.resolverForToken);
routeConfig.resolverForParameterName = new Map(controller.resolverForParameterName);
}
routeConfig.middlewares.push(...action.middlewares.map(v => {
return { config: v(), module };
}));
for (const item of action.resolverForToken) routeConfig.resolverForToken.set(...item);
for (const item of action.resolverForParameterName) routeConfig.resolverForParameterName.set(...item);
routeConfig.serializer = action.serializer;
routeConfig.serializationOptions = action.serializationOptions;
return routeConfig;
}
/**
 * Holds all registered routes. Each registration bumps `buildId`, which the
 * HttpRouter uses to detect when its compiled matcher is stale.
 */
export class HttpRouterRegistry extends HttpRouterRegistryFunctionRegistrar {
    protected routes: RouteConfig[] = [];
    private buildId: number = 1;

    /** Monotonic counter, incremented on every route registration. */
    public getBuildId(): number {
        return this.buildId;
    }

    /** All currently registered routes. */
    public getRoutes(): RouteConfig[] {
        return this.routes;
    }

    /** Registers one route per @http action found on the given controller class. */
    public addRouteForController(controller: ClassType, module: InjectorModule<any>) {
        const controllerData = httpClass._fetch(controller);
        if (!controllerData) throw new Error(`Http controller class ${getClassName(controller)} has no @http.controller decorator.`);
        const schema = ReflectionClass.from(controller);

        for (const action of controllerData.getActions()) {
            const routeConfig = createRouteConfigFromHttpAction({
                type: 'controller',
                controller,
                module,
                methodName: action.methodName,
            }, action, module, controllerData);
            routeConfig.module = module;
            if (schema.hasMethod(action.methodName)) {
                routeConfig.returnType = schema.getMethod(action.methodName).getReturnType();
            }
            this.addRoute(routeConfig);
        }
    }

    public addRoute(routeConfig: RouteConfig) {
        this.routes.push(routeConfig);
        this.buildId++;
    }
}
/**
 * Compiles all registered routes into a single matcher function (via
 * CompilerContext code generation) and resolves incoming requests to a
 * route config plus a lazy parameter resolver.
 */
export class HttpRouter {
    // compiled request matcher; rebuilt lazily when the registry's buildId changes
    protected fn?: (request: HttpRequest) => ResolvedController | undefined;
    // registry buildId at the time `fn` was compiled
    protected buildId: number = 0;
    // compiled route-name -> URL resolver, built lazily on first resolveUrl()
    protected resolveFn?: (name: string, parameters: { [name: string]: any }) => string;

    /**
     * Parses the request body with formidable (multipart, urlencoded,
     * octet-stream, JSON). The parsed result is cached on `req.body`, so
     * repeated calls do not re-read the stream.
     *
     * NOTE(review): the callback's `files` argument shadows the outer `files`
     * (uploaded files) parameter, which is never populated here — verify
     * whether uploaded files were meant to be collected into that dictionary.
     */
    private parseBody(req: HttpRequest, files: { [name: string]: UploadedFile }) {
        const form = formidable({
            multiples: true,
            hash: 'sha1',
            enabledPlugins: ['octetstream', 'querystring', 'json'],
        });
        return asyncOperation((resolve, reject) => {
            if (req.body) {
                // already parsed earlier in the request lifecycle
                return resolve(req.body);
            }
            form.parse(req, (err: any, fields: any, files: any) => {
                if (err) {
                    reject(err);
                } else {
                    // merge form fields and files into one body object and cache it
                    const body = req.body = { ...fields, ...files };
                    resolve(body);
                }
            });
        });
    }

    constructor(
        controllers: HttpControllers,
        private logger: LoggerInterface,
        tagRegistry: TagRegistry,
        private middlewareRegistry: MiddlewareRegistry = new MiddlewareRegistry,
        private registry: HttpRouterRegistry = new HttpRouterRegistry,
    ) {
        // register all class controllers known at construction time
        for (const controller of controllers.controllers) {
            this.addRouteForController(controller.controller, controller.module);
        }
    }

    getRoutes(): RouteConfig[] {
        return this.registry.getRoutes();
    }

    /** Convenience factory, mainly for tests: builds a router for the given controllers. */
    static forControllers(
        controllers: (ClassType | { module: InjectorModule<any>, controller: ClassType })[],
        tagRegistry: TagRegistry = new TagRegistry(),
        middlewareRegistry: MiddlewareRegistry = new MiddlewareRegistry(),
        module: InjectorModule<any> = new InjectorModule()
    ): HttpRouter {
        return new this(new HttpControllers(controllers.map(v => {
            return isClass(v) ? { controller: v, module } : v;
        })), new Logger([], []), tagRegistry, middlewareRegistry);
    }

    /**
     * Generates the matcher code snippet for one route: a guard on HTTP method
     * and path regex that, when hit, returns the ResolvedController including a
     * generated parameter-resolver function.
     */
    protected getRouteCode(compiler: CompilerContext, routeConfig: RouteConfig): string {
        const routeConfigVar = compiler.reserveVariable('routeConfigVar', routeConfig);
        const parsedRoute = parseRouteControllerAction(routeConfig);
        const path = routeConfig.getFullPath();
        // static prefix before the first path parameter, used as a cheap pre-check
        const prefix = path.substr(0, path.indexOf(':'));
        const regexVar = compiler.reserveVariable('regex', new RegExp('^' + parsedRoute.regex + '$'));

        // accumulated code fragments for the generated parameter-resolver function
        const setParameters: string[] = [];
        const parameterNames: string[] = [];
        const parameterValidator: string[] = [];
        let bodyValidationErrorHandling = `if (bodyErrors.length) throw ValidationError.from(bodyErrors);`;
        let enableParseBody = false;
        const hasParameters = parsedRoute.getParameters().length > 0;
        let requiresAsyncParameters = false;
        let setParametersFromPath = '';
        const fullPath = routeConfig.getFullPath();
        const middlewareConfigs = filterMiddlewaresForRoute(this.middlewareRegistry.configs, routeConfig, fullPath);

        for (const parameter of parsedRoute.getParameters()) {
            if (parameter.body || parameter.bodyValidation) {
                // body parameter: deserialize + validate the parsed body fields
                const type = parameter.getType();
                const validatorVar = compiler.reserveVariable('argumentValidator', getValidatorFunction(undefined, type));
                const converterVar = compiler.reserveVariable('argumentConverter', getSerializeFunction(type, serializer.deserializeRegistry));
                enableParseBody = true;
                setParameters.push(`parameters.${parameter.parameter.name} = ${converterVar}(bodyFields, {loosely: true});`);
                parameterValidator.push(`${validatorVar}(parameters.${parameter.parameter.name}, {errors: bodyErrors});`);
                if (parameter.bodyValidation) {
                    // HttpBodyValidation: hand validation errors to the action instead of throwing
                    compiler.context.set('BodyValidation', ValidatedBody);
                    compiler.context.set('BodyValidationError', BodyValidationError);
                    parameterNames.push(`new BodyValidation(new BodyValidationError(bodyErrors), bodyErrors.length === 0 ? parameters.${parameter.parameter.name} : undefined)`);
                    bodyValidationErrorHandling = '';
                } else {
                    parameterNames.push(`parameters.${parameter.parameter.name}`);
                }
            } else if (parameter.query || parameter.queries) {
                // query parameter: read from the parsed query string, possibly at a sub-path
                const converted = getSerializeFunction(parameter.parameter.parameter, serializer.deserializeRegistry, undefined, parameter.getName());
                const validator = getValidatorFunction(undefined, parameter.parameter.parameter,);
                const converterVar = compiler.reserveVariable('argumentConverter', converted);
                const validatorVar = compiler.reserveVariable('argumentValidator', validator);

                const queryPath = parameter.typePath === undefined && parameter.query ? parameter.parameter.name : parameter.typePath;
                const accessor = queryPath ? `['` + (queryPath.replace(/\./g, `']['`)) + `']` : '';
                const queryAccessor = queryPath ? `_query${accessor}` : '_query';
                setParameters.push(`parameters.${parameter.parameter.name} = ${converterVar}(${queryAccessor}, {loosely: true});`);
                parameterNames.push(`parameters.${parameter.parameter.name}`);
                parameterValidator.push(`${validatorVar}(parameters.${parameter.parameter.name}, {errors: validationErrors}, ${JSON.stringify(parameter.typePath || parameter.getName())});`);
            } else {
                parameterNames.push(`parameters.${parameter.parameter.name}`);

                if (parameter.isPartOfPath()) {
                    if (parameter.parameter.type.kind !== ReflectionKind.class) {
                        // deserialize + validate the raw regex capture
                        const converted = getSerializeFunction(parameter.parameter.parameter, serializer.deserializeRegistry, undefined, parameter.getName());
                        const converterVar = compiler.reserveVariable('argumentConverter', converted);
                        setParameters.push(`parameters.${parameter.parameter.name} = ${converterVar}(_match[${1 + (parameter.regexPosition || 0)}], {loosely: true});`);

                        const validator = getValidatorFunction(undefined, parameter.parameter.parameter);
                        const validatorVar = compiler.reserveVariable('argumentValidator', validator);
                        parameterValidator.push(`${validatorVar}(parameters.${parameter.parameter.name}, {errors: validationErrors}, ${JSON.stringify(parameter.getName())});`);
                    } else {
                        // class-typed path parameter: pass the raw capture (a resolver may convert it)
                        setParameters.push(`parameters.${parameter.parameter.name} = _match[${1 + (parameter.regexPosition || 0)}];`);
                    }
                }

                const injectorToken = parameter.parameter.type.kind === ReflectionKind.class ? parameter.parameter.type.classType : undefined;
                const injectorTokenVar = compiler.reserveVariable('classType', injectorToken);
                const parameterResolverFoundVar = compiler.reserveVariable('parameterResolverFound', false);
                setParameters.push(`${parameterResolverFoundVar} = false;`);

                const resolver = routeConfig.resolverForParameterName.get(parameter.getName()) || routeConfig.resolverForToken.get(injectorToken);

                //make sure all parameter values from the path are available
                if (resolver && !setParametersFromPath) {
                    for (const i in parsedRoute.pathParameterNames) {
                        setParametersFromPath += `parameters.${i} = _match[${1 + parsedRoute.pathParameterNames[i]}];`;
                    }
                }

                let injector = '_injector';
                const moduleVar = routeConfig.module ? ', ' + compiler.reserveConst(routeConfig.module, 'module') : '';

                if (resolver) {
                    const resolverProvideTokenVar = compiler.reserveVariable('resolverProvideToken', resolver);
                    // resolvers may be async, so the whole parameter function becomes async
                    requiresAsyncParameters = true;
                    const instance = compiler.reserveVariable('resolverInstance');

                    setParameters.push(`
                    //resolver ${getClassName(resolver)} for ${parameter.getName()}
                    ${instance} = ${injector}.get(${resolverProvideTokenVar}${moduleVar});
                    if (!${parameterResolverFoundVar}) {
                        ${parameterResolverFoundVar} = true;
                        parameters.${parameter.parameter.name} = await ${instance}.resolve({
                            token: ${injectorTokenVar},
                            routeConfig: ${routeConfigVar},
                            request: request,
                            name: ${JSON.stringify(parameter.parameter.name)},
                            value: parameters.${parameter.parameter.name},
                            query: _query,
                            parameters: parameters
                        });
                    }`);
                }

                if (!parameter.isPartOfPath()) {
                    // fall back to dependency injection when no resolver handled it
                    let injectorGet = `parameters.${parameter.parameter.name} = ${injector}.get(${injectorTokenVar});`;
                    if (parameter.parameter.isOptional()) {
                        injectorGet = `try {parameters.${parameter.parameter.name} = ${injector}.get(${injectorTokenVar}); } catch (e) {}`;
                    }
                    setParameters.push(`if (!${parameterResolverFoundVar}) ${injectorGet}`);
                }
            }
        }

        let parseBodyLoading = '';
        if (enableParseBody) {
            const parseBodyVar = compiler.reserveVariable('parseBody', this.parseBody.bind(this));
            parseBodyLoading = `
            const bodyFields = (await ${parseBodyVar}(request, uploadedFiles));`;
            requiresAsyncParameters = true;
        }

        // routes without parameters can use a cheap exact string comparison
        let matcher = `_path.startsWith(${JSON.stringify(prefix)}) && (_match = _path.match(${regexVar}))`;
        if (!hasParameters) {
            matcher = `_path === ${JSON.stringify(path)}`;
        }

        let middlewares = 'undefined';
        if (middlewareConfigs.length) {
            const middlewareItems: string[] = [];

            for (const middlewareConfig of middlewareConfigs) {
                const moduleVar = middlewareConfig.module ? ', ' + compiler.reserveVariable('module', middlewareConfig.module) : '';
                for (const middleware of middlewareConfig.config.middlewares) {
                    if (isClass(middleware)) {
                        // class middleware: instantiated via the injector per request
                        const classVar = compiler.reserveVariable('middlewareClassType', middleware);
                        middlewareItems.push(`{fn: function() {return _injector.get(${classVar}${moduleVar}).execute(...arguments) }, timeout: ${middlewareConfig.config.timeout}}`);
                    } else {
                        middlewareItems.push(`{fn: ${compiler.reserveVariable('middlewareFn', middleware)}, timeout: ${middlewareConfig.config.timeout}}`);
                    }
                }
            }

            middlewares = `
                function(_injector) {
                    return [${middlewareItems.join(', ')}];
                }
            `;
        }

        let parameters = '() => []';
        if (setParameters.length) {
            parameters = `${requiresAsyncParameters ? 'async' : ''} function(_injector){
                const validationErrors = [];
                const bodyErrors = [];
                const parameters = {};
                ${setParametersFromPath}
                ${parseBodyLoading}
                ${setParameters.join('\n')}
                ${parameterValidator.join('\n')}
                ${bodyValidationErrorHandling}
                if (validationErrors.length) throw new ValidationError(validationErrors);
                return [${parameterNames.join(',')}];
            }`;
        }

        let methodCheck = '';
        if (routeConfig.httpMethods.length) {
            methodCheck = '(' + routeConfig.httpMethods.map(v => {
                return `_method === '${v.toUpperCase()}'`;
            }).join(' || ') + ') && ';
        }

        return `
            //=> ${routeConfig.httpMethods.join(',')} ${path}
            if (${methodCheck}${matcher}) {
                return {routeConfig: ${routeConfigVar}, parameters: ${parameters}, uploadedFiles: uploadedFiles, middlewares: ${middlewares}};
            }
        `;
    }

    /**
     * Generates one `case` of the name->URL resolver switch: interpolates path
     * parameters into the route template and appends query parameters.
     */
    protected getRouteUrlResolveCode(compiler: CompilerContext, routeConfig: RouteConfig): string {
        const parsedRoute = parseRouteControllerAction(routeConfig);

        let url = routeConfig.getFullPath();
        url = url.replace(/:(\w+)/g, (a, name) => {
            return `\${parameters.${name}}`;
        });

        const modify: string[] = [];

        for (const parameter of parsedRoute.getParameters()) {
            if (parameter.query || parameter.queries) {
                const queryPath = parameter.typePath === undefined && parameter.query ? parameter.parameter.name : parameter.typePath || '';
                if (parameter.parameter.type.kind === ReflectionKind.class || parameter.parameter.type.kind === ReflectionKind.objectLiteral) {
                    // object query parameter: one query entry per property
                    for (const property of ReflectionClass.from(parameter.parameter.type).getProperties()) {
                        const accessor = `parameters.${parameter.getName()}?.${property.name}`;
                        const thisPath = queryPath ? queryPath + '.' + property.name : property.name;
                        modify.push(`${accessor} !== undefined && query.push(${JSON.stringify(dotToUrlPath(thisPath))} + '=' + encodeURIComponent(${accessor}))`);
                    }
                } else {
                    modify.push(`parameters.${parameter.getName()} !== undefined && query.push(${JSON.stringify(dotToUrlPath(queryPath))} + '=' + encodeURIComponent(parameters.${parameter.getName()}))`);
                }
            }
        }

        return `
            case ${JSON.stringify(routeConfig.name)}: {
                let url = \`${url}\`;
                let query = [];
                ${modify.join('\n')}
                return url + (query.length ? '?'+query.join('&') : '');
            }
        `;
    }

    public addRoute(routeConfig: RouteConfig) {
        this.registry.addRoute(routeConfig);
    }

    public addRouteForController(controller: ClassType, module: InjectorModule<any>) {
        this.registry.addRouteForController(controller, module);
    }

    /** Compiles all routes into one matcher function. */
    protected build(): (request: HttpRequest) => ResolvedController | undefined {
        this.buildId = this.registry.getBuildId();
        const compiler = new CompilerContext;
        compiler.context.set('ValidationError', ValidationError);
        compiler.context.set('qs', qs);

        const code: string[] = [];

        for (const route of this.getRoutes()) {
            code.push(this.getRouteCode(compiler, route));
        }

        return compiler.build(`
            let _match;
            const _method = request.method || 'GET';
            const _url = request.url || '/';
            const _qPosition = _url.indexOf('?');
            let uploadedFiles = {};
            const _path = _qPosition === -1 ? _url : _url.substr(0, _qPosition);
            const _query = _qPosition === -1 ? {} : qs.parse(_url.substr(_qPosition + 1));
            ${code.join('\n')}
        `, 'request') as any;
    }

    /** Compiles the route-name -> URL resolver function. */
    protected buildUrlResolver(): any {
        const compiler = new CompilerContext;
        const code: string[] = [];

        for (const route of this.getRoutes()) {
            code.push(this.getRouteUrlResolveCode(compiler, route));
        }

        return compiler.build(`
            switch (name) {
                ${code.join('\n')}
            }
            throw new Error('No route for name ' + name + ' found');
        `, 'name', 'parameters') as any;
    }

    /** Builds the URL for a named route, filling in path and query parameters. */
    public resolveUrl(routeName: string, parameters: { [name: string]: any } = {}): string {
        if (!this.resolveFn) {
            this.resolveFn = this.buildUrlResolver();
        }

        return this.resolveFn!(routeName, parameters);
    }

    /** Matches a request against the compiled routes, rebuilding the matcher if stale. */
    public resolveRequest(request: HttpRequest): ResolvedController | undefined {
        if (!this.fn || this.buildId !== this.registry.getBuildId()) {
            this.fn = this.build();
        }

        return this.fn(request);
    }

    /** Convenience overload of resolveRequest for a plain method + url pair. */
    public resolve(method: string, url: string): ResolvedController | undefined {
        method = method.toUpperCase();
        return this.resolveRequest({ url, method } as any);
    }
}
import * as Bluebird from 'bluebird';
import * as Dockerode from 'dockerode';
import { EventEmitter } from 'events';
import { isLeft } from 'fp-ts/lib/Either';
import * as JSONStream from 'JSONStream';
import * as _ from 'lodash';
import { promises as fs } from 'fs';
import StrictEventEmitter from 'strict-event-emitter-types';
import * as config from '../config';
import { docker } from '../lib/docker-utils';
import * as logger from '../logger';
import { PermissiveNumber } from '../config/types';
import constants = require('../lib/constants');
import {
InternalInconsistencyError,
NotFoundError,
StatusCodeError,
} from '../lib/errors';
import * as LogTypes from '../lib/log-types';
import { checkInt, isValidDeviceName } from '../lib/validation';
import { Service, ServiceStatus } from './service';
import { serviceNetworksToDockerNetworks } from './utils';
import log from '../lib/supervisor-console';
import logMonitor from '../logging/monitor';
/** Events emitted by the service manager. */
interface ServiceManagerEvents {
    // emitted when a supervised container's state changes
    change: void;
}
type ServiceManagerEventEmitter = StrictEventEmitter<
    EventEmitter,
    ServiceManagerEvents
>;
// module-level emitter backing the exported on/once/removeListener functions below
const events: ServiceManagerEventEmitter = new EventEmitter();

/** Options controlling how a service's container is killed. */
interface KillOpts {
    // whether to also remove the container after killing it
    removeContainer?: boolean;
    // whether to wait for the operation to finish before resolving
    wait?: boolean;
}
// Re-export the emitter's subscription methods so consumers can listen for
// service-manager events without accessing the emitter itself.
export const on: typeof events['on'] = events.on.bind(events);
export const once: typeof events['once'] = events.once.bind(events);
export const removeListener: typeof events['removeListener'] = events.removeListener.bind(
    events,
);
export const removeAllListeners: typeof events['removeAllListeners'] = events.removeAllListeners.bind(
    events,
);
// Whether a container has died, indexed by ID
const containerHasDied: Dictionary<boolean> = {};

// Whether the docker event listener has been started (see attach logic elsewhere in this module)
let listening = false;

// Volatile state of containers, indexed by containerId (or random strings if
// we don't yet have an id)
const volatileState: Dictionary<Partial<Service>> = {};
/**
 * Lists all supervised services, optionally narrowed by extra label filters.
 * Containers that disappear between listing and inspection are skipped.
 * Any volatile (in-memory) status overrides the status reported by docker.
 */
export const getAll = async (
    extraLabelFilters: string | string[] = [],
): Promise<Service[]> => {
    const filterLabels = ['supervised'].concat(extraLabelFilters);
    const containers = await listWithBothLabels(filterLabels);

    const maybeServices = await Promise.all(
        containers.map(async (container) => {
            try {
                const serviceInspect = await docker.getContainer(container.Id).inspect();
                const service = Service.fromDockerContainer(serviceInspect);
                // We know that the containerId is set below, because `fromDockerContainer`
                // always sets it
                const vState = volatileState[service.containerId!];
                if (vState != null && vState.status != null) {
                    service.status = vState.status;
                }
                return service;
            } catch (e) {
                // container vanished between list and inspect — ignore it
                if (NotFoundError(e)) {
                    return null;
                }
                throw e;
            }
        }),
    );

    return maybeServices.filter((s) => s != null) as Service[];
};
/**
 * Finds the running container that matches the given service definition.
 * Throws a 404 StatusCodeError when no matching container exists.
 */
async function get(service: Service) {
    // Get the container ids for special network handling
    const containerIds = await getContainerIdMap(
        service.appUuid || service.appId,
    );

    const candidates = await getAll(`service-name=${service.serviceName}`);
    const matching = candidates.filter((candidate) =>
        candidate.isEqualConfig(service, containerIds),
    );

    if (matching.length === 0) {
        const e: StatusCodeError = new Error(
            'Could not find a container matching this service definition',
        );
        e.statusCode = 404;
        throw e;
    }
    return matching[0];
}
/**
 * Get the current state of all supervised services.
 *
 * Volatile (in-memory) entries take precedence; services without a volatile
 * record are summarized from their container inspection data.
 */
export async function getState() {
  const services = await getAll();
  const status = _.clone(volatileState);
  for (const service of services) {
    if (service.containerId == null) {
      throw new InternalInconsistencyError(
        // Fixed: this message previously named the long-gone
        // getLegacyServicesState function
        `containerId not defined in ServiceManager.getState: ${service}`,
      );
    }
    if (status[service.containerId] == null) {
      status[service.containerId] = _.pick(service, [
        'appId',
        'appUuid',
        'imageId',
        'status',
        'releaseId',
        'commit',
        'createdAt',
        'serviceName',
      ]) as Partial<Service>;
    }
  }
  return _.values(status);
}
/**
 * Inspect a container by docker id and build a Service from it.
 * Returns null for containers that carry neither the balena nor the legacy
 * resin "supervised" label.
 */
export async function getByDockerContainerId(
  containerId: string,
): Promise<Service | null> {
  const inspectInfo = await docker.getContainer(containerId).inspect();
  const labels = inspectInfo.Config.Labels;
  const isSupervised =
    labels['io.balena.supervised'] != null ||
    labels['io.resin.supervised'] != null;
  if (!isSupervised) {
    return null;
  }
  return Service.fromDockerContainer(inspectInfo);
}
/**
 * Rename the container backing `service` so its name encodes the target's
 * image/release/commit metadata.
 */
export async function updateMetadata(service: Service, target: Service) {
  const existing = await get(service);
  const containerId = existing.containerId;
  if (containerId == null) {
    throw new InternalInconsistencyError(
      `No containerId provided for service ${service.serviceName} in ServiceManager.updateMetadata. Service: ${service}`,
    );
  }
  const newName = `${service.serviceName}_${target.imageId}_${target.releaseId}_${target.commit}`;
  await docker.getContainer(containerId).rename({ name: newName });
}
/**
 * Perform a handover from `current` to `target`: disable restarts on the old
 * container, start the new one, wait for the handover-complete marker (or
 * timeout), then kill the old container.
 */
export async function handover(current: Service, target: Service) {
  // We set the running container to not restart so that in case of a poweroff
  // it doesn't come back after boot.
  await prepareForHandover(current);
  await start(target);
  await waitToKill(
    current,
    target.config.labels['io.balena.update.handover-timeout'],
  );
  await kill(current);
}
export async function killAllLegacy(): Promise<void> {
// Containers haven't been normalized (this is an updated supervisor)
const supervisorImageId = (
await docker.getImage(constants.supervisorImage).inspect()
).Id;
for (const container of await docker.listContainers({ all: true })) {
if (container.ImageID !== supervisorImageId) {
await killContainer(container.Id, {
serviceName: 'legacy',
});
}
}
}
/**
 * Kill the container backing a service.
 * @throws InternalInconsistencyError when the service has no containerId.
 */
export function kill(service: Service, opts: KillOpts = {}) {
  const { containerId } = service;
  if (containerId == null) {
    throw new InternalInconsistencyError(
      `Attempt to kill container without containerId! Service :${service}`,
    );
  }
  return killContainer(containerId, service, opts);
}
/**
 * Remove a dead service's container (and its anonymous volumes). A missing
 * container counts as success; any other removal error is logged and
 * rethrown.
 */
export async function remove(service: Service) {
  logger.logSystemEvent(LogTypes.removeDeadService, { service });
  const existingService = await get(service);
  if (existingService.containerId == null) {
    throw new InternalInconsistencyError(
      // Fixed: the message previously named updateMetadata (copy-paste)
      `No containerId provided for service ${service.serviceName} in ServiceManager.remove. Service: ${service}`,
    );
  }
  try {
    await docker.getContainer(existingService.containerId).remove({ v: true });
  } catch (e) {
    // A 404 means the container is already gone, which is what we wanted
    if (!NotFoundError(e)) {
      logger.logSystemEvent(LogTypes.removeDeadServiceError, {
        service,
        error: e,
      });
      throw e;
    }
  }
}
/**
 * Create (but do not start) a container for `service`. If a matching
 * container already exists it is returned instead. While creation is in
 * flight a mock container id is used to report the 'Installing' status.
 */
async function create(service: Service) {
  const mockContainerId = config.newUniqueKey();
  try {
    const existing = await get(service);
    if (existing.containerId == null) {
      throw new InternalInconsistencyError(
        // Fixed: the message previously named updateMetadata (copy-paste)
        `No containerId provided for service ${service.serviceName} in ServiceManager.create. Service: ${service}`,
      );
    }
    return docker.getContainer(existing.containerId);
  } catch (e) {
    if (!NotFoundError(e)) {
      logger.logSystemEvent(LogTypes.installServiceError, {
        service,
        error: e,
      });
      throw e;
    }
    // TODO: this seems a bit late to be checking this
    const deviceName = await config.get('name');
    if (!isValidDeviceName(deviceName)) {
      throw new Error(
        'The device name contains a newline, which is unsupported by balena. ' +
          'Please fix the device name',
      );
    }
    // New services need to have an appUuid
    if (service.appUuid == null) {
      throw new InternalInconsistencyError(
        'Attempt to start a service without an existing app uuid',
      );
    }
    // We cannot get rid of appIds yet
    if (service.appId == null) {
      throw new InternalInconsistencyError(
        'Attempt to start a service without an existing app id',
      );
    }
    // Get all created services so far, there
    const serviceContainerIds = await getContainerIdMap(service.appId);
    const conf = service.toDockerContainer({
      deviceName,
      containerIds: serviceContainerIds,
    });
    const nets = serviceNetworksToDockerNetworks(service.extraNetworksToJoin());
    logger.logSystemEvent(LogTypes.installService, { service });
    reportNewStatus(mockContainerId, service, 'Installing');
    const container = await docker.createContainer(conf);
    service.containerId = container.id;
    // Connect the new container to any extra networks it should join
    await Promise.all(
      _.map((nets || {}).EndpointsConfig, (endpointConfig, name) =>
        docker.getNetwork(name).connect({
          Container: container.id,
          EndpointConfig: endpointConfig,
        }),
      ),
    );
    logger.logSystemEvent(LogTypes.installServiceSuccess, { service });
    return container;
  } finally {
    // Drop the mock 'Installing' entry and notify listeners
    reportChange(mockContainerId);
  }
}
/**
 * Create (if needed) and start the container for `service`.
 *
 * Reports a 'Starting' status while in progress, attaches the logger to the
 * container, and marks the service config as running. A 304 from docker
 * (already started) is treated as success.
 */
export async function start(service: Service) {
  let alreadyStarted = false;
  let containerId: string | null = null;
  try {
    const container = await create(service);
    containerId = container.id;
    logger.logSystemEvent(LogTypes.startService, { service });
    reportNewStatus(containerId, service, 'Starting' as ServiceStatus);
    let shouldRemove = false;
    let err: Error | undefined;
    try {
      await container.start();
    } catch (e) {
      // Get the statusCode from the original cause and make sure it's
      // definitely an int for comparison reasons
      const maybeStatusCode = PermissiveNumber.decode(e.statusCode);
      if (isLeft(maybeStatusCode)) {
        shouldRemove = true;
        err = new Error(`Could not parse status code from docker error: ${e}`);
        throw err;
      }
      const statusCode = maybeStatusCode.right;
      const message = e.message;
      // 304 means the container was already started, precisely what we want
      if (statusCode === 304) {
        alreadyStarted = true;
      } else if (
        statusCode === 500 &&
        _.isString(message) &&
        message.trim().match(/exec format error$/)
      ) {
        // Provide a friendlier error message for "exec format error"
        const deviceType = await config.get('deviceType');
        err = new Error(
          `Application architecture incompatible with ${deviceType}: exec format error`,
        );
        throw err;
      } else {
        // rethrow the same error
        err = e;
        throw e;
      }
    } finally {
      if (shouldRemove) {
        // If starting the container failed, we remove it so that it doesn't litter
        await container.remove({ v: true }).catch(_.noop);
        logger.logSystemEvent(LogTypes.startServiceError, {
          service,
          error: err,
        });
      }
    }
    const serviceId = service.serviceId;
    const imageId = service.imageId;
    if (serviceId == null || imageId == null) {
      throw new InternalInconsistencyError(
        `serviceId and imageId not defined for service: ${service.serviceName} in ServiceManager.start`,
      );
    }
    // Attach the logger so the container's output is streamed to the backend
    logger.attach(container.id, { serviceId, imageId });
    if (!alreadyStarted) {
      logger.logSystemEvent(LogTypes.startServiceSuccess, { service });
    }
    service.config.running = true;
    return container;
  } finally {
    // Clear the volatile 'Starting' status and notify listeners
    if (containerId != null) {
      reportChange(containerId);
    }
  }
}
/**
 * Listen to docker container events (die/start/destroy) and keep this
 * module's state and log attachments in sync. If the event stream ends or
 * errors, the loop re-arms itself after one second. Safe to call repeatedly:
 * only one listener loop runs at a time.
 */
export function listenToEvents() {
  if (listening) {
    return;
  }
  listening = true;
  const listen = async () => {
    const stream = await docker.getEvents({
      filters: { type: ['container'] } as any,
    });
    stream.on('error', (e) => {
      log.error(`Error on docker events stream:`, e);
    });
    const parser = JSONStream.parse();
    parser.on('data', async (data: { status: string; id: string }) => {
      if (data != null) {
        const status = data.status;
        if (status === 'die' || status === 'start' || status === 'destroy') {
          try {
            let service: Service | null = null;
            try {
              service = await getByDockerContainerId(data.id);
            } catch (e) {
              // Container already gone: treat it as not supervised
              if (!NotFoundError(e)) {
                throw e;
              }
            }
            if (service != null) {
              events.emit('change');
              if (status === 'die') {
                logger.logSystemEvent(LogTypes.serviceExit, { service });
                containerHasDied[data.id] = true;
              } else if (status === 'start' && containerHasDied[data.id]) {
                // A previously-dead container came back: log the restart and
                // re-attach the logger
                delete containerHasDied[data.id];
                logger.logSystemEvent(LogTypes.serviceRestart, {
                  service,
                });
                const serviceId = service.serviceId;
                const imageId = service.imageId;
                if (serviceId == null || imageId == null) {
                  throw new InternalInconsistencyError(
                    `serviceId and imageId not defined for service: ${service.serviceName} in ServiceManager.listenToEvents`,
                  );
                }
                logger.attach(data.id, {
                  serviceId,
                  imageId,
                });
              } else if (status === 'destroy') {
                // Stop following logs of removed containers
                await logMonitor.detach(data.id);
              }
            }
          } catch (e) {
            log.error('Error on docker event:', e, e.stack);
          }
        }
      }
    });
    // Resolve when the parser stream ends, reject on parse errors, so the
    // outer loop can restart listening
    return new Promise((resolve, reject) => {
      parser
        .on('error', (e: Error) => {
          log.error('Error on docker events stream:', e);
          reject(e);
        })
        .on('end', resolve);
      stream.pipe(parser);
    });
  };
  Bluebird.resolve(listen())
    .catch((e) => {
      log.error('Error listening to events:', e, e.stack);
    })
    .finally(() => {
      // Restart the listener loop after a short delay
      listening = false;
      setTimeout(listenToEvents, 1000);
    });
  return;
}
/**
 * Re-attach the logger to every service that is already running, e.g. after
 * a supervisor restart.
 */
export async function attachToRunning() {
  const services = await getAll();
  for (const service of services) {
    if (service.status === 'Running') {
      const serviceId = service.serviceId;
      const imageId = service.imageId;
      if (serviceId == null || imageId == null) {
        throw new InternalInconsistencyError(
          // Fixed: the message previously named ServiceManager.start
          `serviceId and imageId not defined for service: ${service.serviceName} in ServiceManager.attachToRunning`,
        );
      }
      if (service.containerId == null) {
        throw new InternalInconsistencyError(
          `containerId not defined for service: ${service.serviceName} in ServiceManager.attachToRunning`,
        );
      }
      logger.attach(service.containerId, {
        serviceId,
        imageId,
      });
    }
  }
}
/**
 * Build a serviceName -> containerId map for an application, matching the
 * app by either its numeric id or its uuid and deduplicating by containerId.
 */
async function getContainerIdMap(
  appIdOrUuid: number | string,
): Promise<Dictionary<string>> {
  const [byAppId, byAppUuid] = await Promise.all([
    getAll(`app-id=${appIdOrUuid}`),
    getAll(`app-uuid=${appIdOrUuid}`),
  ]);
  const merged = _.unionBy(byAppId, byAppUuid, 'containerId');
  return _.mapValues(
    _.keyBy(merged, 'serviceName'),
    'containerId',
  ) as Dictionary<string>;
}
/**
 * Update (or clear, when `status` is omitted) the volatile state entry for a
 * container and notify listeners that service state may have changed.
 */
function reportChange(containerId?: string, status?: Partial<Service>) {
  if (containerId != null) {
    const hasEntry = volatileState[containerId] != null;
    if (status != null) {
      // Store a copy so later mutations of `status` don't leak in
      volatileState[containerId] = { ...status };
    } else if (hasEntry) {
      delete volatileState[containerId];
    }
  }
  events.emit('change');
}
/**
 * Record a new volatile status for a container, carrying over the service's
 * identifying metadata, and notify listeners.
 */
function reportNewStatus(
  containerId: string,
  service: Partial<Service>,
  status: ServiceStatus,
) {
  const metadata = _.pick(service, [
    'imageId',
    'appId',
    'appUuid',
    'serviceName',
    'releaseId',
    'createdAt',
    'commit',
  ]);
  reportChange(containerId, _.merge({ status }, metadata));
}
/**
 * Stop (and optionally remove) a container by id.
 *
 * @param containerId id of the container to stop
 * @param service service metadata, used for logging and status reporting
 * @param opts removeContainer: also remove the container (default true);
 *             wait: resolve only once the stop/remove completes (default false)
 */
function killContainer(
  containerId: string,
  service: Partial<Service> = {},
  { removeContainer = true, wait = false }: KillOpts = {},
): Bluebird<void> {
  // To maintain compatibility of the `wait` flag, this function is not
  // async, but it feels like whether or not the promise should be waited on
  // should performed by the caller
  // TODO: Remove the need for the wait flag
  return Bluebird.try(() => {
    logger.logSystemEvent(LogTypes.stopService, { service });
    if (service.imageId != null) {
      reportNewStatus(containerId, service, 'Stopping');
    }
    const containerObj = docker.getContainer(containerId);
    const killPromise = Bluebird.resolve(containerObj.stop())
      .then(() => {
        if (removeContainer) {
          return containerObj.remove({ v: true });
        }
      })
      .catch((e) => {
        // Get the statusCode from the original cause and make sure it's
        // definitely an int for comparison reasons
        const maybeStatusCode = PermissiveNumber.decode(e.statusCode);
        if (isLeft(maybeStatusCode)) {
          throw new Error(
            `Could not parse status code from docker error: ${e}`,
          );
        }
        const statusCode = maybeStatusCode.right;
        // 304 means the container was already stopped, so we can just remove it
        if (statusCode === 304) {
          logger.logSystemEvent(LogTypes.stopServiceNoop, { service });
          // Why do we attempt to remove the container again?
          if (removeContainer) {
            return containerObj.remove({ v: true });
          }
        } else if (statusCode === 404) {
          // 404 means the container doesn't exist, precisely what we want!
          logger.logSystemEvent(LogTypes.stopRemoveServiceNoop, {
            service,
          });
        } else {
          throw e;
        }
      })
      .tap(() => {
        // Success path: clear the death marker and log
        delete containerHasDied[containerId];
        logger.logSystemEvent(LogTypes.stopServiceSuccess, { service });
      })
      .catch((e) => {
        // NOTE(review): errors here are logged but swallowed — the returned
        // promise never rejects
        logger.logSystemEvent(LogTypes.stopServiceError, {
          service,
          error: e,
        });
      })
      .finally(() => {
        // Clear the volatile 'Stopping' status
        if (service.imageId != null) {
          reportChange(containerId);
        }
      });
    if (wait) {
      return killPromise;
    }
    return;
  });
}
/**
 * List containers carrying the given labels under either the legacy
 * `io.resin.` prefix or the current `io.balena.` prefix, deduplicated by Id.
 */
async function listWithBothLabels(
  labelList: string[],
): Promise<Dockerode.ContainerInfo[]> {
  const listWithPrefix = (prefix: string) =>
    docker.listContainers({
      all: true,
      filters: {
        label: labelList.map((label) => `${prefix}${label}`),
      },
    });
  const [legacy, current] = await Promise.all([
    listWithPrefix('io.resin.'),
    listWithPrefix('io.balena.'),
  ]);
  return _.unionBy(legacy, current, 'Id');
}
/**
 * Prepare a service's container for handover: clear its restart policy (so
 * it stays down after a poweroff) and rename it with an `old_` prefix.
 */
async function prepareForHandover(service: Service) {
  const existing = await get(service);
  if (existing.containerId == null) {
    throw new InternalInconsistencyError(
      `No containerId provided for service ${service.serviceName} in ServiceManager.prepareForHandover. Service: ${service}`,
    );
  }
  const container = docker.getContainer(existing.containerId);
  await container.update({ RestartPolicy: {} });
  const oldName = `old_${service.serviceName}_${service.imageId}_${service.releaseId}_${service.commit}`;
  return await container.rename({ name: oldName });
}
/**
 * Poll for the handover-complete marker file(s) written by the service, up
 * to `timeout` milliseconds (defaults to 60000). Resolves once a marker file
 * exists (it is then deleted, best-effort) or the deadline has passed.
 */
function waitToKill(service: Service, timeout: number | string) {
  const pollInterval = 100;
  timeout = checkInt(timeout, { positive: true }) || 60000;
  const deadline = Date.now() + timeout;
  const handoverCompletePaths = service.handoverCompleteFullPathsOnHost();
  // Resolves when any marker file exists; otherwise retries until deadline
  const wait = (): Bluebird<void> =>
    Bluebird.any(
      handoverCompletePaths.map((file) =>
        fs.stat(file).then(() => fs.unlink(file).catch(_.noop)),
      ),
    ).catch(async () => {
      if (Date.now() < deadline) {
        await Bluebird.delay(pollInterval);
        return wait();
      } else {
        log.info(
          `Handover timeout has passed, assuming handover was completed for service ${service.serviceName}`,
        );
      }
    });
  log.info(
    `Waiting for handover to be completed for service: ${service.serviceName}`,
  );
  return wait().then(() => {
    log.success(`Handover complete for service ${service.serviceName}`);
  });
}
import dialogPolyfill from "dialog-polyfill";
import {Command, Help, PluginHelp} from "../api/help";
import {getParameterByName} from '../common/urls';
// `allHelp` is provided globally at runtime; declared here for type checking.
declare const allHelp: Help;
/**
 * Rebuild the repo <select> options from allHelp.AllRepos, keeping the first
 * (placeholder) option and selecting the repo from the query string, if any.
 */
function redrawOptions(): void {
  const repos = allHelp.AllRepos.sort();
  const sel = document.getElementById("repo") as HTMLSelectElement;
  // Drop everything but the placeholder option
  while (sel.length > 1) {
    sel.removeChild(sel.lastChild!);
  }
  const param = getParameterByName("repo");
  for (const repo of repos) {
    const option = document.createElement("option");
    option.text = repo;
    option.selected = !!(param && repo === param);
    sel.appendChild(option);
  }
}
window.onload = (): void => {
  // set dropdown based on options from query string
  const hash = window.location.hash;
  redrawOptions();
  redraw();
  // Register dialog with the polyfill (for browsers without native <dialog>)
  const dialog = document.querySelector('dialog') as HTMLDialogElement;
  dialogPolyfill.registerDialog(dialog);
  dialog.querySelector('.close')!.addEventListener('click', () => {
    dialog.close();
  });
  if (hash !== "") {
    // Deep link: scroll the targeted row into view and click its primary
    // button (which opens the plugin dialog)
    const el = document.body.querySelector(hash);
    const mainContainer = document.body.querySelector(".mdl-layout__content");
    if (el && mainContainer) {
      // Delay so layout has settled before measuring and scrolling
      setTimeout(() => {
        mainContainer.scrollTop = el.getBoundingClientRect().top;
        window.location.hash = hash;
      }, 32);
      (el.querySelector(".mdl-button--primary") as HTMLButtonElement).click();
    }
  }
};
/**
 * Text of the currently selected option, or "" when the placeholder
 * (index 0) is selected.
 */
function selectionText(sel: HTMLSelectElement): string {
  if (sel.selectedIndex === 0) {
    return "";
  }
  return sel.options[sel.selectedIndex].text;
}
/**
 * Takes an org/repo string and a repo-to-plugins map and returns the plugins
 * that apply to the repo: org-wide plugins first, then repo-specific ones,
 * deduplicated and sorted.
 * @param repoSel repo name ("" selects the global plugin list)
 * @param repoPlugins maps "" / org / org-repo keys to plugin names
 */
function applicablePlugins(repoSel: string, repoPlugins: {[key: string]: string[]}): string[] {
  if (repoSel === "") {
    const all = repoPlugins[""];
    return all ? all.sort() : [];
  }
  const org = repoSel.split("/")[0];
  // Fixed: the original guarded with `byOrg !== []`, which compares array
  // identity and is therefore always true — the check was meaningless.
  const plugins: string[] = [...(repoPlugins[org] || [])];
  for (const pluginName of repoPlugins[repoSel] || []) {
    if (!plugins.includes(pluginName)) {
      plugins.push(pluginName);
    }
  }
  return plugins.sort();
}
/**
 * Returns a normal cell for the command row.
 * @param data content of the cell (a string, or a list rendered as <ul>)
 * @param styles a list of styles applied to the cell content.
 * @param noWrap true if the content of the cell should not be wrapped.
 */
function createCommandCell(data: string | string[], styles: string[] = [], noWrap = false): HTMLTableDataCellElement {
  const cell = document.createElement("td");
  cell.classList.add("mdl-data-table__cell--non-numeric");
  if (!noWrap) {
    cell.classList.add("table-cell");
  }
  let content: HTMLElement;
  if (Array.isArray(data)) {
    const list = document.createElement("ul");
    list.classList.add("command-example-list");
    for (const item of data) {
      const entry = document.createElement("li");
      const span = document.createElement("span");
      span.innerHTML = item;
      span.classList.add(...styles);
      entry.appendChild(span);
      list.appendChild(entry);
    }
    content = list;
  } else {
    const div = document.createElement("div");
    div.classList.add(...styles);
    div.innerHTML = data;
    content = div;
  }
  cell.appendChild(content);
  return cell;
}
/**
 * Returns an icon element, optionally wrapped in an MDL button, with an
 * optional MDL tooltip attached.
 * @param no no. command
 * @param iconString icon name
 * @param styles list of styles of the icon
 * @param tooltip tooltip string (empty string means no tooltip)
 * @param isButton true if icon is a button
 */
function createIcon(no: number, iconString: string, styles: string[], tooltip: string, isButton?: false): HTMLDivElement;
function createIcon(no: number, iconString: string, styles: string[], tooltip: string, isButton?: true): HTMLButtonElement;
function createIcon(no: number, iconString: string, styles: string[] = [], tooltip: string = "", isButton = false) {
  const icon = document.createElement("i");
  icon.id = `icon-${iconString}-${no}`;
  icon.classList.add("material-icons", ...styles);
  icon.innerHTML = iconString;
  const container = document.createElement(isButton ? "button" : "div");
  container.appendChild(icon);
  if (isButton) {
    container.classList.add("mdl-button", "mdl-js-button", "mdl-button--icon");
  }
  if (tooltip !== "") {
    const tooltipEl = document.createElement("div");
    tooltipEl.setAttribute("for", icon.id);
    tooltipEl.classList.add("mdl-tooltip");
    tooltipEl.innerHTML = tooltip;
    container.appendChild(tooltipEl);
  }
  return container;
}
/**
 * Returns the status cell for the command row, containing the featured
 * and/or external-plugin icons.
 * @param isFeatured true if the command is featured.
 * @param isExternal true if the command is external.
 * @param no no. command.
 */
function commandStatus(isFeatured: boolean, isExternal: boolean, no: number): HTMLTableDataCellElement {
  const cell = document.createElement("td");
  cell.classList.add("mdl-data-table__cell--non-numeric");
  if (isFeatured) {
    const icon = createIcon(no, "stars", ["featured-icon"], "Featured command");
    cell.appendChild(icon);
  }
  if (isExternal) {
    const icon = createIcon(no, "open_in_new", ["external-icon"], "External plugin");
    cell.appendChild(icon);
  }
  return cell;
}
/**
 * Returns a section for the content of the dialog.
 * @param title title of the section
 * @param body body of the section (HTML)
 */
function addDialogSection(title: string, body: string): HTMLElement {
  const sectionTitle = document.createElement("h5");
  sectionTitle.classList.add("dialog-section-title");
  sectionTitle.innerHTML = title;
  const sectionBody = document.createElement("p");
  sectionBody.classList.add("dialog-section-body");
  sectionBody.innerHTML = body;
  const container = document.createElement("div");
  container.classList.add("dialog-section");
  container.appendChild(sectionTitle);
  container.appendChild(sectionBody);
  return container;
}
/**
 * Returns a cell for the Plugin column: a button that opens the shared
 * <dialog> filled with the plugin's description, handled events, and
 * configuration for the selected repo.
 * @param repo repo name
 * @param pluginName plugin name.
 * @param plugin the plugin to which the command belongs
 */
function createPluginCell(repo: string, pluginName: string, plugin: PluginHelp): HTMLTableDataCellElement {
  const pluginCell = document.createElement("td");
  const button = document.createElement("button");
  pluginCell.classList.add("mdl-data-table__cell--non-numeric");
  button.classList.add("mdl-button", "mdl-button--js", "mdl-button--primary");
  button.innerHTML = pluginName;
  // Attach Event Handlers.
  const dialog = document.querySelector('dialog') as HTMLDialogElement;
  button.addEventListener('click', () => {
    const title = dialog.querySelector(".mdl-dialog__title")!;
    const content = dialog.querySelector(".mdl-dialog__content")!;
    // Clear content left over from the previously shown plugin
    while (content.firstChild) {
      content.removeChild(content.firstChild);
    }
    title.innerHTML = pluginName;
    if (plugin.Description) {
      content.appendChild(addDialogSection("Description", plugin.Description));
    }
    if (plugin.Events) {
      const sectionContent = `[${plugin.Events.sort().join(", ")}]`;
      content.appendChild(addDialogSection("Events handled", sectionContent));
    }
    if (plugin.Config) {
      // Config is keyed by repo; "" holds the global configuration
      const sectionContent = plugin.Config ? plugin.Config[repo] : "";
      const sectionTitle =
        repo === "" ? "Configuration(global)" : `Configuration(${repo})`;
      if (sectionContent && sectionContent !== "") {
        content.appendChild(addDialogSection(sectionTitle, sectionContent));
      }
    }
    dialog.showModal();
  });
  pluginCell.appendChild(button);
  return pluginCell;
}
/**
 * Creates a cell with a link button that copies a deep link to the command
 * (the current URL with `#<name>` as fragment) to the clipboard.
 */
function createCommandLink(name: string, no: number): HTMLTableDataCellElement {
  const link = document.createElement("td");
  const iconButton = createIcon(no, "link", ["link-icon"], "", true);
  iconButton.addEventListener("click", () => {
    // execCommand("copy") requires a selected input, so use a hidden one
    const tempInput = document.createElement("input");
    let url = window.location.href;
    // Strip any existing fragment before appending ours
    const hashIndex = url.indexOf("#");
    if (hashIndex !== -1) {
      url = url.slice(0, hashIndex);
    }
    url += "#" + name;
    tempInput.style.zIndex = "-99999";
    tempInput.style.background = "transparent";
    tempInput.value = url;
    document.body.appendChild(tempInput);
    tempInput.select();
    document.execCommand("copy");
    document.body.removeChild(tempInput);
    const toast = document.body.querySelector("#toast")! as SnackbarElement;
    toast.MaterialSnackbar.showSnackbar({message: "Copied to clipboard"});
  });
  link.appendChild(iconButton);
  link.classList.add("mdl-data-table__cell--non-numeric");
  return link;
}
/**
 * Creates a row for the Command table.
 * @param repo repo name.
 * @param pluginName plugin name.
 * @param plugin the plugin to which the command belongs.
 * @param command the command.
 * @param isExternal true if the command belongs to an external plugin
 * @param no no. command
 */
function createCommandRow(repo: string, pluginName: string, plugin: PluginHelp, command: Command, isExternal: boolean, no: number): HTMLTableRowElement {
  const name = extractCommandName(command.Examples[0]);
  const row = document.createElement("tr");
  // The row id makes the command linkable via the URL fragment
  row.id = name;
  const cells = [
    commandStatus(command.Featured, isExternal, no),
    createCommandCell(command.Usage, ["command-usage"]),
    createCommandCell(command.Examples, ["command-examples"], true),
    createCommandCell(command.Description, ["command-desc-text"]),
    createCommandCell(command.WhoCanUse, ["command-desc-text"]),
    createPluginCell(repo, pluginName, plugin),
    createCommandLink(name, no),
  ];
  for (const cell of cells) {
    row.appendChild(cell);
  }
  return row;
}
/**
 * Redraw a plugin table: featured commands first, then the rest, one row per
 * command. The table is hidden entirely when there are no plugins.
 * @param repo repo name.
 * @param helpMap maps a plugin name to a plugin.
 */
function redrawHelpTable(repo: string, helpMap: Map<string, {isExternal: boolean, plugin: PluginHelp}>): void {
  const table = document.getElementById("command-table")!;
  const tableBody = document.querySelector("tbody")!;
  if (helpMap.size === 0) {
    table.style.display = "none";
    return;
  }
  table.style.display = "table";
  while (tableBody.childElementCount !== 0) {
    tableBody.removeChild(tableBody.firstChild!);
  }
  const names = Array.from(helpMap.keys());
  const commandsWithPluginName: Array<{pluginName: string, command: Command}> = [];
  for (const name of names) {
    helpMap.get(name)!.plugin.Commands.forEach((command) => {
      commandsWithPluginName.push({
        command,
        pluginName: name,
      });
    });
  }
  commandsWithPluginName
    .sort((command1, command2) => {
      // Fixed: the previous comparator returned -1 whenever command1 was
      // featured — even when both were — violating the sort contract
      // (compare(a, b) and compare(b, a) must be consistent).
      return Number(command2.command.Featured) - Number(command1.command.Featured);
    })
    .forEach((command, index) => {
      const pluginName = command.pluginName;
      const {isExternal, plugin} = helpMap.get(pluginName)!;
      const commandRow = createCommandRow(
        repo,
        pluginName,
        plugin,
        command.command,
        isExternal,
        index);
      tableBody.appendChild(commandRow);
    });
}
/**
 * Redraws the content of the page: syncs the URL query string with the
 * selected repo, refreshes the repo dropdown, and rebuilds the command table
 * from the built-in and external plugins applicable to the selection.
 */
function redraw(): void {
  const repoSel = selectionText(document.getElementById("repo") as HTMLSelectElement);
  // Reflect the current selection in the URL without reloading the page
  if (window.history && window.history.replaceState !== undefined) {
    if (repoSel !== "") {
      history.replaceState(null, "", "/command-help?repo="
        + encodeURIComponent(repoSel));
    } else {
      history.replaceState(null, "", "/command-help");
    }
  }
  redrawOptions();
  const pluginsWithCommands: Map<string, {isExternal: boolean, plugin: PluginHelp}> = new Map();
  // Built-in plugins that define commands
  applicablePlugins(repoSel, allHelp.RepoPlugins)
    .forEach((name) => {
      if (allHelp.PluginHelp[name] && allHelp.PluginHelp[name].Commands) {
        pluginsWithCommands.set(
          name,
          {
            isExternal: false,
            plugin: allHelp.PluginHelp[name],
          });
      }
    });
  // External plugins that define commands
  applicablePlugins(repoSel, allHelp.RepoExternalPlugins)
    .forEach((name) => {
      if (allHelp.ExternalPluginHelp[name]
        && allHelp.ExternalPluginHelp[name].Commands) {
        pluginsWithCommands.set(
          name,
          {
            isExternal: true,
            plugin: allHelp.ExternalPluginHelp[name],
          });
      }
    });
  redrawHelpTable(repoSel, pluginsWithCommands);
}
/**
 * Extracts a command name from a command example: the first word without its
 * leading slash, with every '-' replaced by '_' so the name is valid as an
 * URL fragment.
 */
function extractCommandName(commandExample: string): string {
  const words = commandExample.split(" ");
  if (!words || words.length === 0) {
    throw new Error("Cannot extract command name.");
  }
  return words[0].slice(1).replace(/-/g, "_");
}
// Expose redraw on window; it is referenced by name in the HTML.
(window as any).redraw = redraw;
import { BaseModule, EventChannel, SylApi, Types } from '@syllepsis/adapter';
import debounce from 'lodash.debounce';
import throttle from 'lodash.throttle';
import { AllSelection, NodeSelection } from 'prosemirror-state';
import { EditorView } from 'prosemirror-view';
import { IRenderer } from '../../../../renderer';
import { IToolbarOption } from '../../..';
import { ToolbarLib } from '../..';
// Augment SylApi's command map with the inline-toolbar commands registered
// by this module's loader (show/hide/enable/disable and their getters).
declare module '@syllepsis/adapter' {
  interface ISylApiCommand {
    toolbarInline?: {
      show: () => void;
      hide: () => void;
      getVisible: () => boolean;
      enable: () => void;
      disable: () => void;
      getEnable: () => boolean;
    };
  }
}

// Options for the inline toolbar; `mount` and `showNames` from the base
// toolbar options do not apply here.
interface IToolbarInlineOption extends Omit<IToolbarOption, 'mount' | 'showNames'> {
  threshold?: { top?: number; left?: number; right?: number; bottom?: number }; // minimum margins kept when positioning — presumably relative to viewport edges; confirm against the positioning code
  judgeShow?: (editor: SylApi) => boolean; // extra predicate deciding whether the toolbar may show
  zIndex?: number; // z-index of the toolbar container (defaults to 100)
}

// Props passed to the rendered toolbar component.
interface IToolbarInlineProps {
  editor: SylApi;
  option: IToolbarInlineOption;
  visible: boolean;
  activeFormat: Types.StringMap<any>;
  toolbarLib: ToolbarLib;
}
// NOTE(review): presumably marks elements whose clicks should not close the
// toolbar — its usage is outside this view; confirm before relying on it.
const IGNORE_CLOSE_ATTRIBUTE = 'data-syl-toolbar';
// needs special treatment of rect, otherwise the value of top and bottom are the same
const getPosRect = (view: EditorView, pos: number) => {
  // CellSelection exposes $anchorCell, which is absent from the base
  // Selection typings — hence the ts-ignore
  // @ts-ignore
  if (view.state.selection.$anchorCell) {
    // For cell selections, measure the DOM node's bounding rect instead
    return (view.domAtPos(pos).node as HTMLElement).getBoundingClientRect();
  }
  return view.coordsAtPos(pos);
};
/**
 * Positioning rules of the InlineToolbar:
 * + the vertical position is based on the selection area; the horizontal position is based on the mouse position
 * + it is displayed when there is text content, preferably above the selection area
 * + when selecting across rows, it is displayed above or below according to where the mouse was released
 * + when drag-selecting, if the mouse is released too far from the selection, it is displayed at the release position first, keeping its left/right placement
 * + in other cases where there is a selection but no visible InlineToolbar, moving the mouse makes it appear, centered on the mouse position
 */
class ToolbarInlineLoader extends BaseModule<IToolbarInlineOption> {
public bridge: IRenderer<IToolbarInlineProps>;
private dom: HTMLElement;
private _visible = true;
private mousedown = false;
private preferDir: 'up' | 'down' = 'up'; // stay above or below the selection
private preferLeft: 'fixed' | 'auto' = 'auto'; // whether to recalculate the left position or fixed
private _isEnable = true;
// stored click element because document.activeElement not works well in safari
private lastClickElement: Element | null = null;
get isEnable() {
return this._isEnable;
}
set isEnable(val) {
if (val === this._isEnable) return;
if (!val) this.visible = false;
this._isEnable = val;
}
get visible() {
return this._visible;
}
set visible(val) {
if (!val && this._visible === val) return;
this._visible = val;
const newProps: Partial<IToolbarInlineProps> = { visible: val };
if (val) {
this.dom.style.display = 'block';
newProps.activeFormat = this.updateFormat(false) as Types.StringMap<any>;
// synchronize the displayed position when the state changes
this.adapter.on(EventChannel.LocalEvent.ON_CHANGE, this.tracePos);
this.adapter.view.dom.removeEventListener('mousemove', this.checkShow);
} else {
this.dom.style.display = 'none';
this.adapter.off(EventChannel.LocalEvent.ON_CHANGE, this.tracePos);
// in the case of undo, redo, paste, etc., determine whether it needs to be displayed
this.adapter.view.dom.addEventListener('mousemove', this.checkShow);
}
// render the dom first, then update the props for achieve the animation
requestAnimationFrame(() => this.bridge.setProps(newProps));
}
get threshold(): Required<Required<IToolbarInlineOption>['threshold']> {
return {
top: 0,
left: 0,
right: 0,
bottom: 0,
...(this.option.threshold || {}),
};
}
constructor(adapter: SylApi, originOption: IToolbarInlineOption) {
super(adapter, { ...originOption });
const option = { ...originOption };
this.option = option;
this.dom = document.createElement('div');
adapter.root.appendChild(this.dom);
this.dom.style.position = 'absolute';
this.dom.style.userSelect = 'none';
this.dom.style.display = 'none';
this.dom.style.zIndex = `${option.zIndex || 100}`;
this.visible = false;
this.bridge = new option.RenderBridge(adapter, option.Component, this.dom, 'toolbarInline');
this.bindEvent();
this.render();
adapter.addCommand('toolbarInline', {
show: () => this.tracePos(undefined, true),
hide: this.hide,
getVisible: () => this.visible,
getEnable: () => this.isEnable,
enable: this.enable,
disable: this.disable,
});
}
enable = () => {
this.isEnable = true;
};
disable = () => {
this.isEnable = false;
};
updateFormat(exec = true) {
const activeFormat = this.adapter.getFormat();
return exec ? this.bridge.setProps({ activeFormat }) : activeFormat;
}
show = () => {
this.visible = true;
};
hide = () => {
this.visible = false;
};
handleMouseUp = (e: any) => {
// only handle the case where the mouse is pressed in the editor
if (!this.mousedown) return;
this.mousedown = false;
this.tracePos(e, true);
};
handleMouseDown = (e: Event) => {
const button = (e as MouseEvent).button;
if (button !== 2) {
this.mousedown = true;
this.hide();
}
};
  // Debounced core of the toolbar: decides whether the toolbar should be
  // visible for the current selection and, if so, computes its top/left inside
  // the editor root. `e` is the mouse event that ended the selection (if any);
  // `force` re-anchors the horizontal position even when already visible.
  tracePos = debounce((e?: MouseEvent, force?: boolean) => {
    if (!this.isEnable || !this.adapter.editable) {
      if (this.visible) this.visible = false;
      return;
    }
    // Mid-drag: wait for mouse-up before positioning.
    if (this.mousedown) return;
    const { view } = this.adapter;
    const { state } = view;
    const { from, to, empty } = state.selection;
    // Hide for empty/whitespace selections, node selections, or when the
    // host-provided judgeShow predicate vetoes display.
    if (
      empty ||
      !this.adapter.getText({ index: from, length: to - from }).trim() ||
      this.adapter.view.state.selection instanceof NodeSelection ||
      (this.option.judgeShow && !this.option.judgeShow(this.adapter))
    ) {
      this.hide();
      return;
    }
    const visible = this.visible;
    // Keep the previous vertical preference only if already visible.
    const dir = visible ? this.preferDir : undefined;
    this.show();
    // Render invisibly first so offsetWidth/offsetHeight are measurable.
    this.dom.style.visibility = 'hidden';
    // calculate the position based on the width and height and the position on the screen, remember to consider the position of the editor itself
    requestAnimationFrame(() => {
      let { $head, $anchor } = state.selection;
      if (state.selection instanceof AllSelection) {
        // Select-all: synthesize head/anchor at the document extremes, walking
        // back from the end until a textblock position resolves.
        $anchor = state.doc.resolve(1);
        let endPos = state.doc.nodeSize - 1;
        while (endPos--) {
          $head = state.doc.resolve(endPos);
          if ($head.node().isTextblock) break;
        }
        // display at the top when in table
        // @ts-ignore
      } else if (state.selection.$anchorCell) {
        const ranges = state.selection.ranges.sort((a, b) => a.$from.pos - b.$from.pos);
        $anchor = ranges[0].$from;
        $head = ranges[0].$to;
      }
      const { top: thresholdTop, left: thresholdLeft, right: thresholdRight, bottom: thresholdBottom } = this.threshold;
      const { top: headTop, left: headLeft, bottom: headBottom } = getPosRect(view, $head.pos);
      const { bottom: anchorBottom, top: anchorTop } = getPosRect(view, $anchor.pos);
      const topPos = Math.min(headTop, anchorTop); // the top position of the selection area
      const bottomPos = Math.max(headBottom, anchorBottom); // the bottom position of the selection area
      const { offsetHeight, offsetWidth, offsetLeft } = this.dom;
      const domOffsetLeft = offsetWidth / 2;
      const { top: editorTop, left: editorLeft, right: editorRight } = this.adapter.root.getBoundingClientRect();
      const maxTop = window.innerHeight - offsetHeight - thresholdBottom;
      const minLeft = thresholdLeft;
      const maxLeft = editorRight - editorLeft - offsetWidth - thresholdRight; // The largest left, related to `thresholdRight`
      const defaultLeft = headLeft - domOffsetLeft - editorLeft; // align the left of the head position in the center
      // Default: 8px above the selection.
      let computedTop = topPos - offsetHeight - 8;
      this.preferDir = 'up';
      // less than the `thresholdTop`, when selected across rows and not displayed, it will be judged according to the position of the head and anchor and displayed at the bottom
      if (
        computedTop < thresholdTop ||
        (dir !== 'up' && Math.abs(anchorBottom - headBottom) > 12 && $head.pos > $anchor.pos)
      ) {
        computedTop = bottomPos + 8;
        this.preferDir = 'down';
      }
      // exceed the `maxTop`, displayed on the top
      if (computedTop > maxTop) {
        computedTop = topPos - offsetHeight - 8;
        this.preferDir = 'up';
      }
      this.dom.style.top = `${computedTop - editorTop}px`;
      // MouseEvent or directly invoke
      if (e instanceof Event || !visible) {
        let target: any;
        let pageX: any;
        if (e instanceof Event) {
          target = e.target;
          pageX = e.pageX;
        } else {
          target = this.adapter.view.dom;
          pageX = domOffsetLeft;
        }
        if (this.adapter.view.dom.contains(target as HTMLElement)) {
          const mouseLeft = (visible ? offsetLeft + editorLeft : pageX - domOffsetLeft) - editorLeft; // mouse left position, if it is visible at this time, it should remain
          let computedLeft = defaultLeft;
          this.preferLeft = 'auto';
          // determine whether to use the cursor position or the mouse position(magic-number)
          if (Math.abs(mouseLeft - defaultLeft) >= 20) {
            computedLeft = mouseLeft;
            this.preferLeft = 'fixed';
          }
          // Clamp into [minLeft, maxLeft]; clamping reverts to 'auto' mode.
          if (computedLeft >= maxLeft) {
            this.preferLeft = 'auto';
            computedLeft = maxLeft;
          } else if (computedLeft <= minLeft) {
            this.preferLeft = 'auto';
            computedLeft = minLeft;
          }
          this.dom.style.left = `${computedLeft}px`;
        }
      } else if (force || this.preferLeft === 'auto') {
        this.preferLeft = 'auto';
        // force to adjust the position when refreshing, indenting, and undoing
        this.dom.style.left = `${Math.min(maxLeft, Math.max(minLeft, defaultLeft))}px`;
      }
      this.dom.style.visibility = '';
    });
  }, 20);
// in other cases, check whether the toolbar needs to be displayed
  // Throttled check used outside of explicit mouse gestures: if the editor is
  // focused and the toolbar is hidden, see whether it should appear.
  checkShow = throttle((e: any) => {
    if (this.visible || !this.adapter.isFocused) {
      return;
    }
    this.tracePos(e);
  }, 20);
  // Debounced blur handler: hide the toolbar only when focus left the editor
  // AND the last click landed outside the toolbar and outside any element
  // marked with IGNORE_CLOSE_ATTRIBUTE (e.g. popovers owned by the toolbar).
  checkHide = debounce(() => {
    if (
      !this.adapter.isFocused &&
      this.lastClickElement &&
      !this.dom.contains(this.lastClickElement) &&
      !this.lastClickElement.closest(IGNORE_CLOSE_ATTRIBUTE)
    ) {
      this.hide();
    }
  }, 300);
storedClick = (e: MouseEvent) => (this.lastClickElement = e.target as Element);
  // Attach all DOM and editor-event listeners; mirrored by destructor().
  bindEvent() {
    document.body.addEventListener('mouseup', this.handleMouseUp);
    document.body.addEventListener('click', this.storedClick);
    this.adapter.view.dom.addEventListener('mousedown', this.handleMouseDown);
    // do not trigger `mouseUp` when drop
    this.adapter.root.addEventListener('drop', this.handleMouseUp);
    this.adapter.on(EventChannel.LocalEvent.ON_BLUR, this.checkHide);
    this.adapter.on(EventChannel.LocalEvent.CONFIG_PLUGIN_CHANGE, this.render);
  }
public setProps(option: IToolbarInlineOption) {
this.option = { ...this.option, ...option };
this.render();
}
  // Push the full prop set (including a freshly constructed ToolbarLib) to the
  // render bridge; also used as the CONFIG_PLUGIN_CHANGE event handler.
  public render = () => {
    this.bridge.setProps({
      editor: this.adapter,
      option: this.option,
      visible: this.visible,
      activeFormat: this.adapter.getFormat(),
      toolbarLib: new ToolbarLib({ editor: this.adapter, option: this.option }),
    });
  };
  // Tear down: remove every listener registered in bindEvent, detach the host
  // element from the editor root, and unmount the render bridge.
  public destructor() {
    document.body.removeEventListener('mouseup', this.handleMouseUp);
    document.body.removeEventListener('click', this.storedClick);
    this.adapter.view.dom.removeEventListener('mousedown', this.handleMouseDown);
    this.adapter.off(EventChannel.LocalEvent.ON_BLUR, this.checkHide);
    this.adapter.off(EventChannel.LocalEvent.CONFIG_PLUGIN_CHANGE, this.render);
    this.adapter.root.removeEventListener('drop', this.handleMouseUp);
    this.adapter.root.removeChild(this.dom);
    this.bridge.unmount();
  }
}
export { IToolbarInlineOption, IToolbarInlineProps, ToolbarInlineLoader }; | the_stack |
import { Schema } from '@cardano-ogmios/client'
import { exec } from 'child_process'
import util, { DataFetcher, ModuleState } from '@cardano-graphql/util'
import fetch from 'cross-fetch'
import { DocumentNode, GraphQLSchema, print } from 'graphql'
import { GraphQLClient, gql } from 'graphql-request'
import { introspectSchema, wrapSchema } from '@graphql-tools/wrap'
import pRetry from 'p-retry'
import path from 'path'
import {
AdaPots,
Asset,
AssetBalance,
AssetSupply,
Block,
PaymentAddressSummary,
ShelleyProtocolParams,
Token,
TransactionOutput
} from './graphql_types'
import { dummyLogger, Logger } from 'ts-log'
import BigNumber from 'bignumber.js'
import {
AssetMetadataAndHash,
AssetMetadataHashAndId,
AssetWithoutTokens
} from './typeAliases'
// Minimal slice of supply data needed to derive total/circulating ADA supply.
export type AdaPotsToCalculateSupply = { circulating: AssetSupply['circulating'], reserves: AdaPots['reserves']}
// Thrown/logged while cardano-db-sync has not yet written the first epoch record.
const epochInformationNotYetAvailable = 'Epoch information not yet available. This is expected during the initial chain-sync.'
const withHexPrefix = (value: string) => `\\x${value !== undefined ? value : ''}`
/**
 * Client for the Hasura GraphQL engine fronting the cardano-db-sync database.
 * Responsibilities: applying migrations/metadata via the Hasura CLI, building
 * an executable remote schema through introspection, polling aggregate supply
 * data, and exposing typed asset/address queries and mutations.
 */
export class HasuraClient {
  private client: GraphQLClient
  // Re-entrancy guard for applySchemaAndMetadata.
  private applyingSchemaAndMetadata: boolean
  // Periodically refreshed supply inputs (circulating / reserves).
  public adaPotsToCalculateSupplyFetcher: DataFetcher<AdaPotsToCalculateSupply>
  private state: ModuleState
  public schema: GraphQLSchema
  constructor (
    readonly hasuraCliPath: string,
    readonly hasuraUri: string,
    pollingInterval: number,
    readonly lastConfiguredMajorVersion: number, // Todo: Depreciate
    private logger: Logger = dummyLogger
  ) {
    this.state = null
    this.applyingSchemaAndMetadata = false
    this.adaPotsToCalculateSupplyFetcher = new DataFetcher<AdaPotsToCalculateSupply>(
      'AdaPotsToCalculateSupply',
      () => {
        // NOTE(review): getAdaPotsToCalculateSupply is async, so this
        // try/catch only covers the synchronous call — rejections of the
        // returned promise are presumably handled by DataFetcher; confirm.
        try {
          return this.getAdaPotsToCalculateSupply()
        } catch (error) {
          // NOTE(review): console.debug bypasses the injected logger — looks
          // like leftover debugging; consider routing through this.logger.
          if (error.message !== epochInformationNotYetAvailable) {
            console.debug(error.message)
          }
          this.logger.trace({ err: error })
        }
      },
      pollingInterval,
      this.logger
    )
    // All requests run under the restricted 'cardano-graphql' Hasura role.
    this.client = new GraphQLClient(
      `${this.hasuraUri}/v1/graphql`,
      {
        headers: {
          'X-Hasura-Role': 'cardano-graphql'
        }
      }
    )
  }
  // Derives circulating supply (UTxO total + withdrawable rewards) and reads
  // reserves from the latest epoch's AdaPots. Throws
  // epochInformationNotYetAvailable until db-sync has produced an epoch row.
  private async getAdaPotsToCalculateSupply (): Promise<AdaPotsToCalculateSupply> {
    const result = await this.client.request(
      gql`query {
          epochs (limit: 1, order_by: { number: desc }) {
              adaPots {
                  reserves
              }
          }
          rewards_aggregate {
              aggregate {
                  sum {
                      amount
                  }
              }
          }
          utxos_aggregate {
              aggregate {
                  sum {
                      value
                  }
              }
          }
          withdrawals_aggregate {
              aggregate {
                  sum {
                      amount
                  }
              }
          }
      }`
    )
    const {
      epochs,
      rewards_aggregate: rewardsAggregate,
      utxos_aggregate: utxosAggregate,
      withdrawals_aggregate: withdrawalsAggregate
    } = result
    if (epochs.length === 0 || epochs[0]?.adaPots === null) {
      this.logger.debug({ module: 'HasuraClient' }, epochInformationNotYetAvailable)
      throw new Error(epochInformationNotYetAvailable)
    }
    // BigNumber avoids precision loss: lovelace sums exceed Number.MAX_SAFE_INTEGER.
    const rewards = new BigNumber(rewardsAggregate.aggregate.sum.amount)
    const utxos = new BigNumber(utxosAggregate.aggregate.sum.value)
    const withdrawals = new BigNumber(withdrawalsAggregate.aggregate.sum.amount)
    const withdrawableRewards = rewards.minus(withdrawals)
    return {
      circulating: utxos.plus(withdrawableRewards).toString(),
      reserves: epochs[0]?.adaPots.reserves
    }
  }
  // Runs a Hasura CLI subcommand against this client's project and endpoint.
  private async hasuraCli (command: string) {
    return new Promise((resolve, reject) => {
      exec(
        `${this.hasuraCliPath} --skip-update-check --project ${path.resolve(__dirname, '..', 'hasura', 'project')} --endpoint ${this.hasuraUri} ${command}`,
        (error, stdout) => {
          // NOTE(review): reject() is not followed by a return, so resolve()
          // below also runs on error; harmless since a promise settles only
          // once, but an early return would be clearer.
          if (error) {
            reject(error)
          }
          this.logger.debug({ module: 'HasuraClient' }, stdout)
          resolve()
        }
      )
    })
  }
  // Idempotent startup: apply schema/metadata, introspect the remote schema
  // (with retries), wait for db-sync to reach minimum progress, then start
  // the supply fetcher.
  public async initialize () {
    if (this.state !== null) return
    this.state = 'initializing'
    this.logger.info({ module: 'HasuraClient' }, 'Initializing')
    await this.applySchemaAndMetadata()
    await pRetry(async () => {
      this.schema = await this.buildHasuraSchema()
    }, {
      factor: 1.75,
      retries: 9,
      onFailedAttempt: util.onFailedAttemptFor(
        'Fetching Hasura schema via introspection',
        this.logger
      )
    })
    this.logger.debug({ module: 'HasuraClient' }, 'graphql-engine setup')
    // Poll (gently, many retries) until at least one epoch row exists.
    await pRetry(async () => {
      const result = await this.client.request(
        gql`query {
            epochs (limit: 1, order_by: { number: desc }) {
                number
            }
        }`
      )
      if (result.epochs.length === 0) {
        this.logger.debug({ module: 'HasuraClient' }, epochInformationNotYetAvailable)
        throw new Error(epochInformationNotYetAvailable)
      }
    }, {
      factor: 1.05,
      retries: 100,
      onFailedAttempt: util.onFailedAttemptFor(
        'Detecting DB sync state has reached minimum progress',
        this.logger
      )
    })
    this.logger.debug({ module: 'HasuraClient' }, 'DB sync state has reached minimum progress')
    await this.adaPotsToCalculateSupplyFetcher.initialize()
    this.state = 'initialized'
    this.logger.info({ module: 'HasuraClient' }, 'Initialized')
  }
  // Stops background polling started by initialize().
  public async shutdown () {
    await this.adaPotsToCalculateSupplyFetcher.shutdown()
  }
  // Re-applies all PostgreSQL migrations (down then up) and Hasura metadata
  // via the CLI, with retries. Guarded so concurrent calls are no-ops.
  public async applySchemaAndMetadata (): Promise<void> {
    if (this.applyingSchemaAndMetadata) return
    this.applyingSchemaAndMetadata = true
    await pRetry(async () => {
      await this.hasuraCli('migrate apply --down all')
      await this.hasuraCli('migrate apply --up all')
    }, {
      factor: 1.75,
      retries: 9,
      onFailedAttempt: util.onFailedAttemptFor(
        'Applying PostgreSQL schema migrations',
        this.logger
      )
    })
    await pRetry(async () => {
      await this.hasuraCli('metadata clear')
      await this.hasuraCli('metadata apply')
    }, {
      factor: 1.75,
      retries: 9,
      onFailedAttempt: util.onFailedAttemptFor('Applying Hasura metadata', this.logger)
    })
    this.applyingSchemaAndMetadata = false
  }
  // Introspects the Hasura endpoint into an executable GraphQLSchema and
  // sanity-checks that the expected core types are present.
  public async buildHasuraSchema () {
    const executor = async ({ document, variables }: { document: DocumentNode, variables?: Object }) => {
      const query = print(document)
      try {
        const fetchResult = await fetch(`${this.hasuraUri}/v1/graphql`, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
            'X-Hasura-Role': 'cardano-graphql'
          },
          body: JSON.stringify({ query, variables })
        })
        return fetchResult.json()
      } catch (error) {
        this.logger.error({ err: error })
        throw error
      }
    }
    // NOTE(review): 'Block' appears twice in this list — harmless duplicate.
    const coreTypes = [
      'Block',
      'Cardano',
      'Epoch',
      'Block',
      'Transaction'
    ]
    const schema = wrapSchema({
      schema: await introspectSchema(executor),
      executor
    })
    for (const t of coreTypes) {
      const gqlType = schema.getType(t)
      if (!gqlType) {
        throw new Error(`Remote schema is missing ${t}`)
      }
    }
    return schema
  }
  // Rollback helper: removes assets first seen after the given slot.
  // Returns the number of deleted rows.
  public async deleteAssetsAfterSlot (slotNo: Block['slotNo']): Promise<number> {
    this.logger.debug(
      { module: 'HasuraClient', slotNo },
      'deleting assets found in tokens after slot'
    )
    const result = await this.client.request(
      gql`mutation DeleteAssetsAfterSlot($slotNo: Int!) {
          delete_assets(
              where: {
                  firstAppearedInSlot: {
                      _gt: $slotNo
                  }
              }
          ) {
              affected_rows
          }
      }`,
      {
        slotNo
      }
    )
    return result.delete_assets.affected_rows
  }
  // Protocol version from the most recent epoch's protocol parameters.
  public async getCurrentProtocolVersion (): Promise<ShelleyProtocolParams['protocolVersion']> {
    const result = await this.client.request(
      gql`query {
          epochs (limit: 1, order_by: { number: desc }) {
              protocolParams {
                  protocolVersion
              }
          }
      }`
    )
    return result.epochs[0]?.protocolParams.protocolVersion
  }
  // Chain point (hash without the \x prefix, slot) of the second-newest asset,
  // used to resume the asset chain-follower. Returns null when no assets exist.
  public async getMostRecentPointWithNewAsset (): Promise<Schema.Point | null> {
    let point: Schema.Point | null
    // Handles possible race condition between the internal chain-follower, which manages the Asset table,
    // and cardano-db-sync's which managed the block table.
    await pRetry(async () => {
      // An offset of 1 is applied to ensure a partial block extraction is not skipped
      const result = await this.client.request(
        gql`query {
            assets (
                limit: 1
                offset: 1
                order_by: { firstAppearedInBlock: { slotNo: desc }}
            ) {
                firstAppearedInBlock {
                    hash
                    slotNo
                }
            }
        }`
      )
      // NOTE(review): graphql-request throws on GraphQL errors, so this
      // result.errors check is likely dead code — confirm client behavior.
      if (result.errors !== undefined) {
        throw new Error(result.errors)
      }
      if (result.assets.length !== 0) {
        if (result.assets[0].firstAppearedInBlock === null) {
          throw new Error('cardano-db-sync is lagging behind the asset sync operation.')
        }
        const { hash, slotNo } = result.assets[0].firstAppearedInBlock
        point = {
          // Strip the leading \x bytea marker from the stored hash.
          hash: hash.substring(2),
          slot: slotNo
        }
      } else {
        point = null
      }
    }, {
      factor: 1.5,
      retries: 1000,
      onFailedAttempt: util.onFailedAttemptFor(
        'Getting the most recent point with a new asset',
        this.logger
      )
    })
    return point
  }
  // Aggregates UTxO value and token quantities at an address (optionally as of
  // a block height) into per-asset balances, including a synthetic 'ada' entry.
  public async getPaymentAddressSummary (address: string, atBlock?: number): Promise<PaymentAddressSummary> {
    const result = await this.client.request(
      gql`query PaymentAddressSummary (
          $address: String!
          $atBlock: Int
      ){
          utxos (
              where: {
                  _and: {
                      address: { _eq: $address },
                      transaction: { block: { number: { _lte: $atBlock }}}
                  }
              }
          ) {
              value
              tokens {
                  asset {
                      assetId
                      assetName
                      decimals
                      description
                      fingerprint
                      logo
                      metadataHash
                      name
                      ticker
                      tokenMints {
                          quantity
                          transaction {
                              hash
                          }
                      }
                      tokenMints_aggregate {
                          aggregate {
                              avg {
                                  quantity
                              }
                              count
                              max {
                                  quantity
                              }
                              min {
                                  quantity
                              }
                              sum {
                                  quantity
                              }
                          }
                      }
                      url
                      policyId
                  }
                  quantity
              }
          }
          utxos_aggregate (
              where: {
                  _and: {
                      address: { _eq: $address },
                      transaction: { block: { number: { _lte: $atBlock }}}
                  }
              }
          ) {
              aggregate {
                  count
              }
          }
      }`,
      {
        address,
        atBlock
      }
    )
    // Accumulate balances keyed by assetId; 'ada' is tracked from utxo values.
    const map = new Map<Asset['assetId'], AssetBalance>()
    for (const utxo of result.utxos as TransactionOutput[]) {
      if (map.has('ada')) {
        const current = map.get('ada')
        map.set('ada', {
          ...current,
          ...{
            quantity: new BigNumber(current.quantity)
              .plus(new BigNumber(utxo.value))
              .toString()
          }
        })
      } else {
        // Placeholder asset record for the native currency.
        map.set('ada', {
          asset: {
            assetId: '\\xada',
            assetName: '\\xada',
            name: 'ada',
            policyId: '\\xada',
            tokenMints: [],
            tokenMints_aggregate: {
              aggregate: {
                avg: {
                  quantity: 'na'
                },
                count: 'na',
                max: {
                  quantity: 'na'
                },
                min: {
                  quantity: 'na'
                },
                sum: {
                  quantity: 'na'
                }
              },
              nodes: []
            }
          },
          quantity: utxo.value
        })
      }
      for (const token of utxo.tokens as Token[]) {
        if (map.has(token.asset.assetId)) {
          const current = map.get(token.asset.assetId)
          map.set(token.asset.assetId, {
            ...current,
            ...{
              quantity: new BigNumber(current.quantity)
                .plus(new BigNumber(token.quantity))
                .toString()
            }
          })
        } else {
          map.set(token.asset.assetId, token as unknown as AssetBalance)
        }
      }
    }
    return {
      assetBalances: [...map.values()],
      utxosCount: result.utxos_aggregate.aggregate.count
    }
  }
  // Sync status relative to the node tip: initialized flag plus a capped
  // sync percentage.
  public async getMeta (nodeTipSlotNumber: number) {
    const result = await this.client.request(
      gql`query {
          epochs (limit: 1, order_by: { number: desc }) {
              number
          }
          cardano {
              tip {
                  epoch {
                      number
                  }
                  slotNo
                  forgedAt
              }
          }}`
    )
    const { tip } = result?.cardano[0]
    const lastEpoch = result?.epochs[0]
    const syncPercentage = tip.slotNo / nodeTipSlotNumber * 100
    return {
      // cardano-db-sync writes the epoch record at the end of each epoch during times of bulk sync
      // The initialization state can be determined by comparing the last epoch record against the
      // tip
      initialized: lastEpoch.number === tip.epoch?.number,
      // we cannot assume that actual db-sync syncPercentage will be less or equal to node sync state due to race condition at the query time
      syncPercentage: syncPercentage > 100 ? 100 : syncPercentage
    }
  }
  // True when an asset row exists for the given (un-prefixed hex) assetId.
  public async hasAsset (assetId: Asset['assetId']): Promise<boolean> {
    const result = await this.client.request(
      gql`query HasAsset (
          $assetId: bytea!
      ) {
          assets (
              where: { assetId: { _eq: $assetId }}
          ) {
              assetId
          }
      }`, {
        assetId: withHexPrefix(assetId)
      }
    )
    const response = result.assets.length > 0
    this.logger.debug(
      { module: 'HasuraClient', assetId, hasAsset: response },
      'Has asset?'
    )
    return response
  }
  // Metadata hashes currently stored for the given asset ids.
  public async getAssetMetadataHashesById (assetIds: Asset['assetId'][]): Promise<AssetMetadataHashAndId[]> {
    const result = await this.client.request(
      gql`query AssetMetadataHashes (
          $assetIds: [bytea!]!
      ){
          assets (
              where: {
                  assetId: { _in: $assetIds }
              }) {
              assetId
              metadataHash
          }
      }`,
      {
        assetIds: assetIds.map(id => withHexPrefix(id))
      }
    )
    return result.assets
  }
  // Writes off-chain registry metadata (name, ticker, logo, …) onto an asset row.
  public async addAssetMetadata (asset: AssetMetadataAndHash) {
    this.logger.info(
      { module: 'HasuraClient', assetId: asset.assetId },
      'Adding metadata to asset'
    )
    const result = await this.client.request(
      gql`mutation AddAssetMetadata(
          $assetId: bytea!
          $decimals: Int
          $description: String
          $logo: String
          $metadataHash: bpchar!
          $name: String
          $ticker: String
          $url: String
      ) {
          update_assets(
              where: {
                  assetId: { _eq: $assetId }
              },
              _set: {
                  decimals: $decimals
                  description: $description
                  logo: $logo
                  metadataHash: $metadataHash
                  name: $name
                  ticker: $ticker
                  url: $url
              }
          ) {
              affected_rows
              returning {
                  assetId
              }
          }
      }`,
      {
        ...asset,
        ...{ assetId: withHexPrefix(asset.assetId) }
      }
    )
    // NOTE(review): likely dead check — graphql-request throws on errors.
    if (result.errors !== undefined) {
      throw new Error(result.errors)
    }
  }
  // Bulk-inserts newly discovered assets, hex-prefixing bytea columns.
  public async insertAssets (assets: AssetWithoutTokens[]) {
    this.logger.debug(
      { module: 'HasuraClient', qty: assets.length },
      'inserting assets found in tokens'
    )
    const result = await this.client.request(
      gql`mutation InsertAssets($assets: [Asset_insert_input!]!) {
          insert_assets(objects: $assets) {
              returning {
                  name
                  policyId
                  description
                  assetName
                  assetId
              }
              affected_rows
          }
      }`,
      {
        assets: assets.map(asset => ({
          ...asset,
          ...{
            assetId: withHexPrefix(asset.assetId),
            assetName: withHexPrefix(asset.assetName),
            policyId: withHexPrefix(asset.policyId)
          }
        }))
      }
    )
    return result
  }
}
import Conditions from '../../../../../resources/conditions';
import NetRegexes from '../../../../../resources/netregexes';
import { UnreachableCode } from '../../../../../resources/not_reached';
import Outputs from '../../../../../resources/outputs';
import { callOverlayHandler } from '../../../../../resources/overlay_plugin_api';
import { Responses } from '../../../../../resources/responses';
import ZoneId from '../../../../../resources/zone_id';
import { RaidbossData } from '../../../../../types/data';
import { PluginCombatantState } from '../../../../../types/event';
import { NetMatches } from '../../../../../types/net_matches';
import { LocaleText, TriggerSet } from '../../../../../types/trigger';
// Per-pull mutable trigger state for Delubrum Reginae (Savage). Every field
// is optional: each is populated lazily by the trigger that first needs it and
// cleared by the corresponding cleanup trigger between mechanics.
export interface Data extends RaidbossData {
  decOffset?: number;
  firstUnknownHeadmarker?: string;
  gloryOfBozjaCount?: number;
  seekerFirstMercy?: NetMatches['Ability'];
  seekerSwords?: string[];
  calledSeekerSwords?: boolean;
  splitterDist?: number;
  seekerCometIds?: number[];
  seekerCometData?: PluginCombatantState[];
  seenHotCharge?: boolean;
  hystericFlare?: boolean;
  tetherIsBombslinger?: boolean;
  tetherOnBomb?: boolean;
  tetherOnSelf?: boolean;
  weaveCount?: number;
  avowedPhase?: string;
  currentTemperature?: number;
  currentBrand?: number;
  forcedMarch?: string;
  blades?: { [id: number]: string };
  safeZone?: string;
  unseenIds?: number[];
  unseenBadRows?: number[];
  unseenBadCols?: number[];
  labyrinthineFate?: string;
  seenLabyrinthineFate?: boolean;
  queenDispelCount?: number;
}
// TODO: warnings for mines after bosses?
// TODO: headmarkers of course have a random offset here eyeroll
// Canonical (un-offset) headmarker ids for this zone; compare against the
// value produced by getHeadmarkerId, never against raw network ids.
const headmarker = {
  mercifulArc: '00F3',
  burningChains: '00EE',
  earthshaker: '00ED',
  spitFlame1: '004F',
  spitFlame2: '0050',
  spitFlame3: '0051',
  spitFlame4: '0052',
  flare: '0057',
  reversal: '00FF', // also tether 0087
  spiteSmite: '0017',
  wrath: '0100',
  foeSplitter: '00C6',
  thunder: '00A0',
  edictSuccess: '0088',
  edictFailure: '0089',
};
// Arena center coordinates, used to translate absolute actor positions into
// boss-relative offsets for direction math.
const seekerCenterX = -0.01531982;
const seekerCenterY = 277.9735;
const avowedCenterX = -272;
const avowedCenterY = -82;
// TODO: promote something like this to Conditions?
const tankBusterOnParty = (data: Data, matches: NetMatches['StartsUsing']) => {
if (matches.target === data.me)
return true;
if (data.role !== 'healer')
return false;
return data.party.inParty(matches.target);
};
// Due to changes introduced in patch 5.2, overhead markers now have a random offset
// added to their ID. This offset currently appears to be set per instance, so
// we can determine what it is from the first overhead marker we see.
// Translate a network headmarker id into its canonical value by subtracting
// the per-instance random offset, which is learned from the first marker seen.
const getHeadmarkerId = (data: Data, matches: NetMatches['HeadMarker']) => {
  if (data.decOffset === undefined) {
    // Offset still unknown: without a reference marker, return a garbage id
    // so no trigger accidentally matches.
    if (!data.firstUnknownHeadmarker)
      return '0000';
    data.decOffset = parseInt(matches.id, 16) - parseInt(data.firstUnknownHeadmarker, 16);
  }
  const hexId = (parseInt(matches.id, 16) - data.decOffset).toString(16).toUpperCase();
  // Left-pad back to four digits (leading zeroes were lost in the round-trip).
  return ('000' + hexId).slice(-4);
};
const triggerSet: TriggerSet<Data> = {
zoneId: ZoneId.DelubrumReginaeSavage,
timelineFile: 'delubrum_reginae_savage.txt',
timelineTriggers: [
{
id: 'DelubrumSav Seeker Baleful Comet',
regex: /Baleful Comet 1/,
beforeSeconds: 8,
infoText: (_data, _matches, output) => output.text!(),
outputStrings: {
text: {
// Comets have impact damage when dropping, so warn to avoid this.
en: 'Get in for comets',
de: 'Geh rein für Kometen',
fr: 'Entrez pour les comètes',
ja: '中で避ける',
cn: '中间躲避',
ko: '중앙에서 운석 맞기',
},
},
},
{
id: 'DelubrumSav Avowed Glory Of Bozja',
regex: /Glory Of Bozja(?! Enrage)/,
// Cast itself is 5.5 seconds, add more warning
beforeSeconds: 8,
// Count the number of Glory of Bozja so that people alternating mitigation
// can more easily assign themselves to even or odd glories.
preRun: (data) => data.gloryOfBozjaCount = (data.gloryOfBozjaCount ?? 0) + 1,
durationSeconds: 8,
suppressSeconds: 1,
alertText: (data, _matches, output) => output.aoeNum!({ num: data.gloryOfBozjaCount }),
outputStrings: {
aoeNum: {
en: 'Big AOE + Bleed (#${num})',
de: 'Große AoE + Blutung (#${num})',
fr: 'Grosse AoE + Saignement (#${num})',
ja: '全体攻撃 + 継続ダメージ (#${num})',
cn: '高伤AoE + DoT (#${num})',
ko: '쌘 광역 + 도트딜 (#${num})',
},
},
},
{
id: 'DelubrumSav Lord Vicious Swipe',
regex: /Vicious Swipe/,
// There are different timings in the first and second phase.
// Consistently use 5 seconds beforehand for both.
beforeSeconds: 5,
suppressSeconds: 1,
response: Responses.knockback(),
},
{
id: 'DelubrumSav Lord Thunderous Discharge',
regex: /Thunderous Discharge/,
// Cast in the timeline is 5 seconds, but there is an additional .5 second cast before damage
beforeSeconds: 7,
suppressSeconds: 1,
response: Responses.aoe(),
},
{
id: 'DelubrumSav Queen Empyrean Iniquity',
regex: /Empyrean Iniquity/,
// Cast itself is 5 seconds, add more warning
beforeSeconds: 9,
durationSeconds: 9,
suppressSeconds: 1,
response: Responses.bigAoe('alert'),
},
{
id: 'DelubrumSav Queen Gods Save The Queen',
regex: /Gods Save The Queen$/,
// Cast in the timeline is 5 seconds, but there is an additional 1 second cast before damage
beforeSeconds: 7,
durationSeconds: 5,
suppressSeconds: 1,
response: Responses.aoe(),
},
],
triggers: [
{
id: 'DelubrumSav Seeker Phase',
type: 'StartsUsing',
// Sets the phase when seeing the Verdant Tempest cast.
netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5AD3', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5AD3', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5AD3', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5AD3', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5AD3', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5AD3', capture: false }),
// Note: this headmarker *could* be skipped, so we will change this later.
run: (data) => data.firstUnknownHeadmarker = headmarker.mercifulArc,
},
{
id: 'DelubrumSav Seeker Verdant Tempest',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5AD3', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5AD3', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5AD3', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5AD3', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5AD3', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5AD3', capture: false }),
response: Responses.aoe(),
},
{
id: 'DelubrumSav Seeker Sword Cleanup',
type: 'StartsUsing',
// This is on First Mercy, which starts before the first ability.
netRegex: NetRegexes.startsUsing({ source: ['Trinity Seeker', 'Seeker Avatar'], id: '5B61', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: ['Trinität Der Sucher', 'Spaltteil Der Sucher'], id: '5B61', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: ['Trinité Soudée', 'Clone De La Trinité Soudée'], id: '5B61', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: ['トリニティ・シーカー', 'シーカーの分体'], id: '5B61', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: ['求道之三位一体', '求道之分身'], id: '5B61', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: ['탐구의 삼위일체', '탐구의 분열체'], id: '5B61', capture: false }),
run: (data) => {
delete data.seekerSwords;
delete data.calledSeekerSwords;
delete data.seekerFirstMercy;
},
},
{
id: 'DelubrumSav Seeker First Mercy',
type: 'Ability',
netRegex: NetRegexes.abilityFull({ source: ['Trinity Seeker', 'Seeker Avatar'], id: '5B61' }),
netRegexDe: NetRegexes.abilityFull({ source: ['Trinität Der Sucher', 'Spaltteil Der Sucher'], id: '5B61' }),
netRegexFr: NetRegexes.abilityFull({ source: ['Trinité Soudée', 'Clone De La Trinité Soudée'], id: '5B61' }),
netRegexJa: NetRegexes.abilityFull({ source: ['トリニティ・シーカー', 'シーカーの分体'], id: '5B61' }),
netRegexCn: NetRegexes.abilityFull({ source: ['求道之三位一体', '求道之分身'], id: '5B61' }),
netRegexKo: NetRegexes.abilityFull({ source: ['탐구의 삼위일체', '탐구의 분열체'], id: '5B61' }),
run: (data, matches) => data.seekerFirstMercy = matches,
},
{
id: 'DelubrumSav Seeker Mercy Swords',
type: 'GainsEffect',
netRegex: NetRegexes.gainsEffect({ target: ['Trinity Seeker', 'Seeker Avatar'], effectId: '808' }),
netRegexDe: NetRegexes.gainsEffect({ target: ['Trinität Der Sucher', 'Spaltteil Der Sucher'], effectId: '808' }),
netRegexFr: NetRegexes.gainsEffect({ target: ['Trinité Soudée', 'Clone De La Trinité Soudée'], effectId: '808' }),
netRegexJa: NetRegexes.gainsEffect({ target: ['トリニティ・シーカー', 'シーカーの分体'], effectId: '808' }),
netRegexCn: NetRegexes.gainsEffect({ target: ['求道之三位一体', '求道之分身'], effectId: '808' }),
netRegexKo: NetRegexes.gainsEffect({ target: ['탐구의 삼위일체', '탐구의 분열체'], effectId: '808' }),
condition: (data) => !data.calledSeekerSwords,
durationSeconds: 10,
alertText: (data, matches, output) => {
data.seekerSwords ??= [];
data.seekerSwords.push(matches.count.toUpperCase());
if (data.seekerSwords.length <= 1 || data.seekerSwords.length >= 4)
return;
if (!data.seekerFirstMercy) {
console.error(`Swords: missing first mercy`);
return;
}
const posX = parseFloat(data.seekerFirstMercy.x) - seekerCenterX;
const posY = parseFloat(data.seekerFirstMercy.y) - seekerCenterY;
const isClone = Math.hypot(posX, posY) > 10;
// 0 = N, 1 = E, etc
const pos = Math.round(2 - 2 * Math.atan2(posX, posY) / Math.PI) % 4;
const heading = Math.round(2 - 2 * parseFloat(data.seekerFirstMercy.heading) / Math.PI) % 4;
const cleaves = data.seekerSwords;
// For boss, rotate so that front = cardinal north.
// For clones, rotate so that front/north = out.
const rotateDir = (dir: number) => (4 + dir - (isClone ? pos : 0) + heading) % 4;
// Seen two cleaves, is this enough information to call??
// If no, we will wait until we have seen the third.
if (data.seekerSwords.length === 2) {
// Named constants for readability.
const dir = { north: 0, east: 1, south: 2, west: 3 };
// Find boss-relative safe zones.
const cleavetoSafeZones: { [cleave: string]: number[] } = {
// Front right cleave.
F7: [dir.south, dir.west],
// Back right cleave.
F8: [dir.west, dir.north],
// Front left cleave.
F9: [dir.east, dir.south],
// Back left cleave.
FA: [dir.north, dir.east],
};
const cleave0 = cleaves[0];
const cleave1 = cleaves[1];
if (cleave0 === undefined || cleave1 === undefined)
throw new UnreachableCode();
const first = cleavetoSafeZones[cleave0];
const second = cleavetoSafeZones[cleave1];
if (first === undefined || second === undefined)
throw new UnreachableCode();
const intersect = first.filter((safe) => second.includes(safe));
if (intersect.length === 2) {
console.error(`Sword: weird intersect: ${JSON.stringify(data.seekerSwords)}`);
return;
}
// This is a bad pattern. Need to wait for three swords.
if (intersect.length === 0)
return;
const singleSafeZone = intersect[0];
if (singleSafeZone === undefined)
throw new UnreachableCode();
const cardinal = rotateDir(singleSafeZone);
if (isClone) {
// Trinity Seeker has a lot of limbs and people have a VERY hard time with
// left vs right at the best of times. Use "in and out" here on the clone
// to make sure this doesn't get messed up. This may mean that there is a
// simpler left->right pattern that could be called, but we're ignoring it
// for clarity of communication.
if (cardinal === dir.north) {
data.calledSeekerSwords = true;
return output.double!({ dir1: output.out!(), dir2: output.in!() });
} else if (cardinal === dir.south) {
data.calledSeekerSwords = true;
return output.double!({ dir1: output.in!(), dir2: output.out!() });
}
// We'll call it the hard way.
return;
}
data.calledSeekerSwords = true;
if (cardinal === dir.north)
return output.double!({ dir1: output.north!(), dir2: output.south!() });
if (cardinal === dir.east)
return output.double!({ dir1: output.east!(), dir2: output.west!() });
if (cardinal === dir.south)
return output.double!({ dir1: output.south!(), dir2: output.north!() });
if (cardinal === dir.west)
return output.double!({ dir1: output.west!(), dir2: output.east!() });
// Or not?
data.calledSeekerSwords = false;
return;
}
// Find the cleave we're missing and add it to the list.
const finalCleaveList = ['F7', 'F8', 'F9', 'FA'].filter((id) => !cleaves.includes(id));
const finalCleave = finalCleaveList[0];
if (finalCleave === undefined || finalCleaveList.length !== 1) {
console.error(`Swords: bad intersection ${JSON.stringify(data.seekerSwords)}`);
return;
}
cleaves.push(finalCleave);
// Seen three clones, which means we weren't able to call with two.
// Try to call out something the best we can.
// "offset" here, being rotate 1/8 of a circle clockwise from 0=north, so 0=NE now.
// This is the unsafe direction. We convert to numbers so we can rotate them.
const offsetDir = { frontRight: 0, backRight: 1, backLeft: 2, frontLeft: 3 };
const cleaveToOffsetDir: { [cleave: string]: number } = {
F7: offsetDir.frontRight,
F8: offsetDir.backRight,
FA: offsetDir.backLeft,
F9: offsetDir.frontLeft,
};
const offsetCleaves = cleaves.map((id) => rotateDir(cleaveToOffsetDir[id] ?? 0));
// Front is rotated to out.
const cloneOffsetCleaveToDirection = {
[offsetDir.frontRight]: output.in!(),
[offsetDir.backRight]: output.out!(),
[offsetDir.backLeft]: output.out!(),
[offsetDir.frontLeft]: output.in!(),
};
// Front is rotated to north.
const bossOffsetCleaveToDirection = {
[offsetDir.frontRight]: output.dirSW!(),
[offsetDir.backRight]: output.dirNW!(),
[offsetDir.backLeft]: output.dirNE!(),
[offsetDir.frontLeft]: output.dirSE!(),
};
const offsetCleaveToDirection = isClone
? cloneOffsetCleaveToDirection
: bossOffsetCleaveToDirection;
data.calledSeekerSwords = true;
const dirs = offsetCleaves.map((dir) => offsetCleaveToDirection[dir]);
return output.quadruple!({ dir1: dirs[0], dir2: dirs[1], dir3: dirs[2], dir4: dirs[3] });
},
outputStrings: {
north: Outputs.north,
east: Outputs.east,
south: Outputs.south,
west: Outputs.west,
in: Outputs.in,
out: Outputs.out,
// Backup for bad patterns.
dirNE: Outputs.dirNE,
dirSE: Outputs.dirSE,
dirSW: Outputs.dirSW,
dirNW: Outputs.dirNW,
double: {
en: '${dir1} > ${dir2}',
de: '${dir1} > ${dir2}',
fr: '${dir1} > ${dir2}',
ja: '${dir1} > ${dir2}',
cn: '${dir1} > ${dir2}',
ko: '${dir1} > ${dir2}',
},
quadruple: {
en: '${dir1} > ${dir2} > ${dir3} > ${dir4}',
de: '${dir1} > ${dir2} > ${dir3} > ${dir4}',
fr: '${dir1} > ${dir2} > ${dir3} > ${dir4}',
ja: '${dir1} > ${dir2} > ${dir3} > ${dir4}',
cn: '${dir1} > ${dir2} > ${dir3} > ${dir4}',
ko: '${dir1} > ${dir2} > ${dir3} > ${dir4}',
},
},
},
    {
      // Dodge front/back for the Baleful Swath side cleaves.
      id: 'DelubrumSav Seeker Baleful Swath',
      type: 'StartsUsing',
      // This is an early warning on the Verdant Path cast.
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5A98', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5A98', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5A98', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5A98', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5A98', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5A98', capture: false }),
      response: Responses.goFrontBack('info'),
      // Merciful arc can be skipped, so if we get here, the next headmarker is burning chains.
      // If we have seen merciful arc, this is a noop.
      run: (data) => data.firstUnknownHeadmarker = headmarker.burningChains,
    },
    {
      // Dodge to the boss-relative intercardinals.
      id: 'DelubrumSav Seeker Act Of Mercy',
      type: 'StartsUsing',
      // This is an early warning on the Verdant Path cast.
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5A97', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5A97', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5A97', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5A97', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5A97', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5A97', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          // "Intercardinals" may confuse people between absolute and relative,
          // so add in the "of boss" just to be extra clear.
          en: 'Go Intercardinal of Boss',
          de: 'Geh in eine Intercardinale Himmelsrichtung vom Boss',
          fr: 'Allez en intercardinal du boss',
          ja: 'ボスの斜めへ',
          cn: '去Boss的对角线方向',
          ko: '보스의 대각선 방향으로 피하기',
        },
      },
    },
    {
      // Line stack behind the boss.
      id: 'DelubrumSav Seeker Iron Impact',
      type: 'StartsUsing',
      // This is an early warning on the Verdant Path cast.
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5A99', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5A99', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5A99', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5A99', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5A99', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5A99', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Get Behind For Line Stack',
          de: 'Geh hinter den Boss für Linien-Stack',
          fr: 'Passez derrière pour le package en ligne',
          ja: '後ろに直線頭割りを準備',
          cn: '去后方,准备直线分摊',
          ko: '보스 뒤에서 직선 쉐어 맞기',
        },
      },
    },
    {
      // 5AD5 is the shared variant: tanks/healers stack to split it, others stay out.
      id: 'DelubrumSav Seeker Baleful Onslaught Buster',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5AD5', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5AD5', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5AD5', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5AD5', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5AD5', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5AD5', capture: false }),
      response: (data, _matches, output) => {
        // cactbot-builtin-response
        output.responseOutputStrings = {
          avoidTankCleave: Outputs.avoidTankCleave,
          sharedTankBuster: {
            en: 'Shared Tank Buster',
            de: 'Geteilter Tank Buster',
            fr: 'Partagez le Tank buster',
            ja: '頭割りタンクバスター',
            cn: '分摊死刑',
            ko: '쉐어 탱버',
          },
        };
        // Tanks/healers get the louder alert to stack; everyone else just avoids.
        if (data.role === 'tank' || data.role === 'healer')
          return { alertText: output.sharedTankBuster!() };
        return { infoText: output.avoidTankCleave!() };
      },
    },
    {
      // 5AD6 is the unshared variant: one tank eats the cleave alone.
      id: 'DelubrumSav Seeker Baleful Onslaught Solo',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5AD6', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5AD6', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5AD6', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5AD6', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5AD6', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5AD6', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Solo Tank Cleave',
          de: 'Solo Tank Cleave',
          fr: 'Tank cleave solo',
          ja: 'ソロタンクバスター',
          cn: '单吃死刑顺劈',
          ko: '광역 탱버 혼자맞기',
        },
      },
    },
    {
      // Hide behind a barricade to block the hit.
      id: 'DelubrumSav Seeker Baleful Blade Out',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5ABE', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5ABE', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5ABE', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5ABE', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5ABE', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5ABE', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Hide Behind Barricade',
          de: 'Hinter den Barrikaden verstecken',
          fr: 'Cachez-vous derrière la barricade',
          ja: '柵の後ろに',
          cn: '躲在栅栏后',
          ko: '울타리 뒤에 숨기',
        },
      },
    },
    {
      // Knockback variant of Baleful Blade: let the barricade catch you.
      id: 'DelubrumSav Seeker Baleful Blade Knockback',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5ABF', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5ABF', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5ABF', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5ABF', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5ABF', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5ABF', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Knockback Into Barricade',
          de: 'Rückstoß in die Barrikaden',
          fr: 'Poussée contre la barricade',
          ja: '柵に吹き飛ばされる',
          cn: '击退到栅栏上',
          ko: '울타리로 넉백당하기',
        },
      },
    },
    {
      // Gaze attack from the orb: look away when it resolves.
      id: 'DelubrumSav Seeker Merciful Moon',
      type: 'StartsUsing',
      // No cast time on this in savage, but Merciful Blooms cast is a ~3s warning.
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Seeker', id: '5ACA', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Sucher', id: '5ACA', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Soudée', id: '5ACA', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・シーカー', id: '5ACA', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '求道之三位一体', id: '5ACA', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '탐구의 삼위일체', id: '5ACA', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Look Away From Orb',
          de: 'Schau weg vom Orb',
          fr: 'Ne regardez pas l\'orbe',
          ja: '玉に背を向ける',
          cn: '背对白球',
          ko: '구슬에게서 뒤돌기',
        },
      },
    },
    {
      // Move out of the spreading purple flower puddles.
      id: 'DelubrumSav Seeker Merciful Blooms',
      type: 'Ability',
      // Call this on the ability of Merciful Moon, it starts casting much earlier.
      netRegex: NetRegexes.ability({ source: 'Aetherial Orb', id: '5AC9', capture: false }),
      netRegexDe: NetRegexes.ability({ source: 'Magiekugel', id: '5AC9', capture: false }),
      netRegexFr: NetRegexes.ability({ source: 'Amas D\'Éther Élémentaire', id: '5AC9', capture: false }),
      netRegexJa: NetRegexes.ability({ source: '魔力塊', id: '5AC9', capture: false }),
      netRegexCn: NetRegexes.ability({ source: '魔力块', id: '5AC9', capture: false }),
      netRegexKo: NetRegexes.ability({ source: '마력 덩어리', id: '5AC9', capture: false }),
      suppressSeconds: 1,
      infoText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Away From Purple',
          de: 'Schau weg von Lila',
          fr: 'Éloignez-vous du violet',
          ja: '花に避ける',
          cn: '远离紫花',
          ko: '꽃 장판에서 멀리 떨어지기',
        },
      },
    },
    {
      // Earthshaker-style tether on you: point it away from the boss.
      id: 'DelubrumSav Seeker Dead Iron',
      type: 'Tether',
      // Headmarkers are randomized, so use the tether instead.
      netRegex: NetRegexes.tether({ target: 'Trinity Seeker', id: '01DB' }),
      netRegexDe: NetRegexes.tether({ target: 'Trinität Der Sucher', id: '01DB' }),
      netRegexFr: NetRegexes.tether({ target: 'Trinité Soudée', id: '01DB' }),
      netRegexJa: NetRegexes.tether({ target: 'トリニティ・シーカー', id: '01DB' }),
      netRegexCn: NetRegexes.tether({ target: '求道之三位一体', id: '01DB' }),
      netRegexKo: NetRegexes.tether({ target: '탐구의 삼위일체', id: '01DB' }),
      // Only fire for the player on the source end of the tether.
      condition: (data, matches) => matches.source === data.me,
      alarmText: (_data, _matches, output) => output.earthshaker!(),
      outputStrings: {
        earthshaker: {
          en: 'Earthshaker, away from boss',
          de: 'Erdstoß, weg vom Boss',
          fr: 'Secousse, éloignez-vous du boss',
          ja: 'アースシェイカー、ボスから離れる',
          cn: '大地摇动,远离Boss',
          ko: '어스징, 보스에게서 떨어지기',
        },
      },
    },
    {
      // Stand on the opposite floor color from the one the caster is standing on.
      id: 'DelubrumSav Seeker Iron Splitter',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: ['Trinity Seeker', 'Seeker Avatar'], id: '5AC0' }),
      netRegexDe: NetRegexes.startsUsing({ source: ['Trinität Der Sucher', 'Spaltteil Der Sucher'], id: '5AC0' }),
      netRegexFr: NetRegexes.startsUsing({ source: ['Trinité Soudée', 'Clone De La Trinité Soudée'], id: '5AC0' }),
      netRegexJa: NetRegexes.startsUsing({ source: ['トリニティ・シーカー', 'シーカーの分体'], id: '5AC0' }),
      netRegexCn: NetRegexes.startsUsing({ source: ['求道之三位一体', '求道之分身'], id: '5AC0' }),
      netRegexKo: NetRegexes.startsUsing({ source: ['탐구의 삼위일체', '탐구의 분열체'], id: '5AC0' }),
      promise: async (data, matches) => {
        // Query the caster's live position via the overlay; the position in
        // the network log line may be stale by cast time.
        const seekerData = await callOverlayHandler({
          call: 'getCombatants',
          ids: [parseInt(matches.sourceId, 16)],
        });
        if (seekerData === null) {
          console.error(`Iron Splitter: null data`);
          return;
        }
        if (!seekerData.combatants) {
          console.error(`Iron Splitter: null combatants`);
          return;
        }
        if (seekerData.combatants.length !== 1) {
          console.error(`Iron Splitter: expected 1, got ${seekerData.combatants.length}`);
          return;
        }
        const seeker = seekerData.combatants[0];
        if (!seeker)
          return;
        // Distance of the caster from the arena center determines which
        // colored ring it is standing on.
        const x = seeker.PosX - seekerCenterX;
        const y = seeker.PosY - seekerCenterY;
        data.splitterDist = Math.hypot(x, y);
      },
      alertText: (data, _matches, output) => {
        if (data.splitterDist === undefined)
          return;
        // All 100 examples I've looked at only hit distance=10, or distance=~14
        // Guessing at the other distances, if they exist.
        //
        // blue inner = 0?
        // white inner = 6?
        // blue middle = 10
        // white middle = 14
        // blue outer = 18?
        // white outer = 22?
        // Rings appear to alternate color every ~4 units of radius, so the
        // parity of the 4-unit band tells us which color the caster is on;
        // call the opposite color as safe.
        const isWhite = Math.floor(data.splitterDist / 4) % 2;
        return isWhite ? output.goBlue!() : output.goWhite!();
      },
      outputStrings: {
        goBlue: {
          en: 'Blue Stone',
          de: 'Blauer Stein',
          fr: 'Pierre bleue',
          ja: '青い床へ',
          cn: '去蓝色',
          ko: '파랑 장판으로',
        },
        goWhite: {
          en: 'White Sand',
          de: 'Weißer Sand',
          fr: 'Sable blanc',
          ja: '白い床へ',
          cn: '去白色',
          ko: '모래 장판으로',
        },
      },
    },
{
id: 'DelubrumSav Seeker Baleful Comet Direction',
type: 'Ability',
netRegex: NetRegexes.abilityFull({ source: 'Seeker Avatar', id: '5AD7' }),
netRegexDe: NetRegexes.abilityFull({ source: 'Spaltteil Der Sucher', id: '5AD7' }),
netRegexFr: NetRegexes.abilityFull({ source: 'Clone De La Trinité Soudée', id: '5AD7' }),
netRegexJa: NetRegexes.abilityFull({ source: 'シーカーの分体', id: '5AD7' }),
netRegexCn: NetRegexes.abilityFull({ source: '求道之分身', id: '5AD7' }),
netRegexKo: NetRegexes.abilityFull({ source: '탐구의 분열체', id: '5AD7' }),
condition: (data, matches) => {
data.seekerCometIds ??= [];
data.seekerCometIds.push(parseInt(matches.sourceId, 16));
return data.seekerCometIds.length === 2;
},
delaySeconds: 0.5,
// In case this hits multiple people.
// (Note: Suppressed status is checked before condition, but the field evaluated after.)
suppressSeconds: 0.5,
promise: async (data) => {
// The avatars get moved right before the comets, and the position data
// is stale in the combat log. :C
const cometData = await callOverlayHandler({
call: 'getCombatants',
ids: data.seekerCometIds?.slice(0, 2),
});
if (cometData === null) {
console.error('Baleful Comet: null cometData');
return;
}
if (!cometData.combatants) {
console.error('Baleful Comet: null combatants');
return;
}
if (!cometData.combatants.length) {
console.error('Baleful Comet: empty combatants');
return;
}
if (cometData.combatants.length !== 2) {
console.error(`Baleful Comet: weird length: ${cometData.combatants.length}`);
return;
}
data.seekerCometData = cometData.combatants;
},
infoText: (data, _matches, output) => {
if (!data.seekerCometData)
throw new UnreachableCode();
const cometIds = data.seekerCometIds;
if (!cometIds)
throw new UnreachableCode();
// The returned data does not come back in the same order.
// Sort by the original order.
data.seekerCometData.sort((a, b) => {
return cometIds.indexOf(a.ID ?? 0) - cometIds.indexOf(b.ID ?? 0);
});
const [firstDir, secondDir] = data.seekerCometData.map((comet) => {
const x = comet.PosX - seekerCenterX;
const y = comet.PosY - seekerCenterY;
const dir = Math.round(4 - 4 * Math.atan2(x, y) / Math.PI) % 8;
return dir;
});
if (firstDir === undefined || secondDir === undefined)
throw new UnreachableCode();
let rotateStr = output.unknown!();
let safeDir;
if (Math.abs(secondDir - firstDir) === 1) {
rotateStr = secondDir > firstDir ? output.clockwise!() : output.counterclockwise!();
safeDir = (secondDir > firstDir ? firstDir - 1 + 8 : firstDir + 1) % 8;
} else {
// edge case where one dir is 0 and the other is 7.
rotateStr = firstDir === 7 ? output.clockwise!() : output.counterclockwise!();
safeDir = firstDir === 7 ? safeDir = 6 : safeDir = 1;
}
const initialDir = [
'north',
'northeast',
'east',
'southeast',
'south',
'southwest',
'west',
'northwest',
][safeDir];
if (!initialDir)
throw new UnreachableCode();
return output.text!({ dir: output[initialDir]!(), rotate: rotateStr });
},
outputStrings: {
unknown: Outputs.unknown,
north: Outputs.north,
northeast: Outputs.northeast,
east: Outputs.east,
southeast: Outputs.southeast,
south: Outputs.south,
southwest: Outputs.southwest,
west: Outputs.west,
northwest: Outputs.northwest,
clockwise: {
en: 'Clockwise',
de: 'Im Uhrzeigersinn',
fr: 'Sens horaire',
ja: '時針回り',
cn: '顺时针',
ko: '시계방향',
},
counterclockwise: {
en: 'Counter-clock',
de: 'Gegen den Uhrzeigersinn',
fr: 'Anti-horaire',
ja: '逆時針回り',
cn: '逆时针',
ko: '반시계방향',
},
text: {
en: 'Go ${dir}, then ${rotate}',
de: 'Geh nach ${dir}, danach ${rotate}',
fr: 'Direction ${dir}, puis ${rotate}',
ja: '${dir}へ、そして${rotate}',
cn: '去${dir},然后${rotate}旋转',
ko: '${dir}으로 간 뒤, ${rotate}',
},
},
},
    {
      // Clear the collected comet ids well after the mechanic has resolved,
      // so the next Baleful Comet set starts fresh.
      id: 'DelubrumSav Seeker Baleful Comet Cleanup',
      type: 'Ability',
      netRegex: NetRegexes.ability({ source: 'Seeker Avatar', id: '5AD7', capture: false }),
      netRegexDe: NetRegexes.ability({ source: 'Spaltteil Der Sucher', id: '5AD7', capture: false }),
      netRegexFr: NetRegexes.ability({ source: 'Clone De La Trinité Soudée', id: '5AD7', capture: false }),
      netRegexJa: NetRegexes.ability({ source: 'シーカーの分体', id: '5AD7', capture: false }),
      netRegexCn: NetRegexes.ability({ source: '求道之分身', id: '5AD7', capture: false }),
      netRegexKo: NetRegexes.ability({ source: '탐구의 분열체', id: '5AD7', capture: false }),
      delaySeconds: 10,
      suppressSeconds: 10,
      run: (data) => delete data.seekerCometIds,
    },
    {
      // Burning chains headmarker on you.
      id: 'DelubrumSav Seeker Burning Chains',
      type: 'HeadMarker',
      netRegex: NetRegexes.headMarker(),
      condition: (data, matches) => {
        if (data.me !== matches.target)
          return false;
        return getHeadmarkerId(data, matches) === headmarker.burningChains;
      },
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Chain on YOU',
          de: 'Kette auf DIR',
          fr: 'Chaîne sur VOUS',
          ja: '自分に鎖',
          cn: '锁链点名',
          ko: '사슬 대상자',
        },
      },
    },
    {
      // Break the chains once the effect (301) actually lands on you.
      id: 'DelubrumSav Seeker Burning Chains Move',
      type: 'GainsEffect',
      netRegex: NetRegexes.gainsEffect({ effectId: '301' }),
      condition: Conditions.targetIsYou(),
      response: Responses.breakChains(),
    },
    {
      // Tank cleave, identified via headmarker.
      id: 'DelubrumSav Seeker Merciful Arc',
      type: 'HeadMarker',
      netRegex: NetRegexes.headMarker(),
      condition: (data, matches) => getHeadmarkerId(data, matches) === headmarker.mercifulArc,
      response: Responses.tankCleave(),
    },
    {
      // 5770 cleaves left first, 576F right first; the pair is cast
      // back-to-back, hence the suppression.
      id: 'DelubrumSav Dahu Shockwave',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Dahu', id: ['5770', '576F'] }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Dahu', id: ['5770', '576F'] }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Dahu', id: ['5770', '576F'] }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ダウー', id: ['5770', '576F'] }),
      netRegexCn: NetRegexes.startsUsing({ source: '大兀', id: ['5770', '576F'] }),
      netRegexKo: NetRegexes.startsUsing({ source: '다후', id: ['5770', '576F'] }),
      // There's a 3s slow windup on the first, then a 1s opposite cast.
      suppressSeconds: 10,
      alertText: (_data, matches, output) => {
        if (matches.id === '5770')
          return output.leftThenRight!();
        return output.rightThenLeft!();
      },
      outputStrings: {
        leftThenRight: {
          en: 'Left, Then Right',
          de: 'Links, dann Rechts',
          fr: 'À gauche, puis à droite',
          ja: '左 => 右',
          cn: '左 => 右',
          ko: '왼쪽 => 오른쪽',
        },
        rightThenLeft: {
          en: 'Right, Then Left',
          de: 'Rechts, dann Links',
          fr: 'À droite, puis à gauche',
          ja: '右 => 左',
          cn: '右 => 左',
          ko: '오른쪽 => 왼쪽',
        },
      },
    },
    {
      // First set: follow the second charge; later sets can follow either.
      id: 'DelubrumSav Dahu Hot Charge',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Dahu', id: '5773', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Dahu', id: '5773', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Dahu', id: '5773', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ダウー', id: '5773', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '大兀', id: '5773', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '다후', id: '5773', capture: false }),
      suppressSeconds: 10,
      alertText: (data, _matches, output) => {
        if (data.seenHotCharge)
          return output.oneOrTwoCharges!();
        return output.followSecondCharge!();
      },
      run: (data) => {
        data.seenHotCharge = true;
        // After the first Hot Charge, the next unidentified headmarker is spit flame #1.
        data.firstUnknownHeadmarker = headmarker.spitFlame1;
      },
      outputStrings: {
        oneOrTwoCharges: {
          en: 'Follow One or Two Charges',
          de: 'Folge dem 1. oder 2. Ansturm',
          fr: 'Suivez 1 ou 2 charges',
          ja: '1回目や2回目の突進に追う',
          cn: '紧跟第一次或第二次冲锋',
          ko: '첫번째나 두번째 돌진 따라가기',
        },
        followSecondCharge: {
          en: 'Follow Second Charge',
          de: 'Folge dem 2. Ansturm',
          fr: 'Suivez la deuxième charge',
          ja: '2回目の突進に追う',
          cn: '紧跟第二次冲锋',
          ko: '두번째 돌진 따라가기',
        },
      },
    },
    {
      // Numbered (1-4) spit flame markers; call out your own number.
      id: 'DelubrumSav Dahu Spit Flame',
      type: 'HeadMarker',
      netRegex: NetRegexes.headMarker(),
      condition: (data, matches) => {
        if (data.me !== matches.target)
          return false;
        // spitFlame1..spitFlame4 are contiguous headmarker ids.
        const id = getHeadmarkerId(data, matches);
        return id >= headmarker.spitFlame1 && id <= headmarker.spitFlame4;
      },
      durationSeconds: 7,
      alarmText: (data, matches, output) => {
        // Offset from spitFlame1 gives the player's number (1-4).
        const id = getHeadmarkerId(data, matches);
        const num = parseInt(id, 16) - parseInt(headmarker.spitFlame1, 16) + 1;
        const outputMap: { [marker: number]: string } = {
          1: output.one!(),
          2: output.two!(),
          3: output.three!(),
          4: output.four!(),
        };
        return outputMap[num];
      },
      outputStrings: {
        one: Outputs.num1,
        two: Outputs.num2,
        three: Outputs.num3,
        four: Outputs.num4,
      },
    },
    {
      // Knockback; position so you land somewhere safe.
      id: 'DelubrumSav Dahu Feral Howl',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Dahu', id: '5767', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Dahu', id: '5767', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Dahu', id: '5767', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ダウー', id: '5767', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '大兀', id: '5767', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '다후', id: '5767', capture: false }),
      alertText: (_data, _matches, output) => output.knockback!(),
      outputStrings: {
        knockback: {
          en: 'Knockback to safe spot',
          de: 'Rückstoß in den sicheren Bereich',
          fr: 'Poussée en zone sûre',
          ja: '安置へノックバック',
          cn: '击退到安全点',
          ko: '안전한 곳으로 넉백되기',
        },
      },
    },
    {
      // Remember that we have the flare marker; Hysteric Assault reads this flag.
      id: 'DelubrumSav Dahu Flare',
      type: 'HeadMarker',
      netRegex: NetRegexes.headMarker(),
      condition: (data, matches) => {
        if (data.me !== matches.target)
          return false;
        const id = getHeadmarkerId(data, matches);
        return id === headmarker.flare;
      },
      run: (data) => data.hystericFlare = true,
    },
    {
      // Knockback; upgraded to an alarm if this player also carries the
      // flare marker recorded by the Dahu Flare trigger.
      id: 'DelubrumSav Dahu Hysteric Assault',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Dahu', id: '5778', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Dahu', id: '5778', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Dahu', id: '5778', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ダウー', id: '5778', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '大兀', id: '5778', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '다후', id: '5778', capture: false }),
      response: (data, _matches, output) => {
        // cactbot-builtin-response
        output.responseOutputStrings = {
          knockbackNoFlare: {
            en: 'Knockback (no flare)',
            de: 'Rückstoß (keine Flare)',
            fr: 'Poussée (pas de brasier)',
            ja: 'ノックバック (フレアなし)',
            cn: '击退 (无核爆)',
            ko: '넉백 (플레어 없음)',
          },
          knockbackWithFlare: {
            en: 'Flare + Knockback (get away)',
            de: 'Flare + Rückstoß (geh weg)',
            fr: 'Brasier + poussée (éloignez-vous)',
            ja: 'フレア + ノックバック (離れる)',
            cn: '核爆 + 击退 (远离)',
            ko: '플레어 + 넉백 (멀리)',
          },
        };
        if (data.hystericFlare)
          return { alarmText: output.knockbackWithFlare!() };
        return { alertText: output.knockbackNoFlare!() };
      },
      // Reset the flag for the next set.
      run: (data) => delete data.hystericFlare,
    },
    {
      // Raidwide from either boss; suppress so the pair only calls once.
      id: 'DelubrumSav Guard Blood And Bone Warrior and Knight',
      type: 'StartsUsing',
      // 5831 from Queen's Warrior
      // 5821 from Queen's Knight
      netRegex: NetRegexes.startsUsing({ source: ['Queen\'s Warrior', 'Queen\'s Knight'], id: ['5831', '5821'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: ['Kriegerin Der Königin', 'Ritter Der Königin'], id: ['5831', '5821'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: ['Guerrière De La Reine', 'Chevalier De La Reine'], id: ['5831', '5821'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: ['クイーンズ・ウォリアー', 'クイーンズ・ナイト'], id: ['5831', '5821'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: ['女王战士', '女王骑士'], id: ['5831', '5821'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: ['여왕의 전사', '여왕의 기사'], id: ['5831', '5821'], capture: false }),
      suppressSeconds: 1,
      response: Responses.aoe(),
    },
    {
      // Raidwide from either boss; suppress so the pair only calls once.
      id: 'DelubrumSav Guard Queen\'s Shot and Blood And Bone Soldier',
      type: 'StartsUsing',
      // 5854 from Queen's Gunner
      // 5841 from Queen's Soldier
      netRegex: NetRegexes.startsUsing({ source: ['Queen\'s Gunner', 'Queen\'s Soldier'], id: ['5854', '5841'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: ['Schütze Der Königin', 'Soldat Der Königin'], id: ['5854', '5841'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: ['Fusilier De La Reine', 'Soldat De La Reine'], id: ['5854', '5841'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: ['クイーンズ・ガンナー', 'クイーンズ・ソルジャー'], id: ['5854', '5841'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: ['女王枪手', '女王士兵'], id: ['5854', '5841'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: ['여왕의 총사', '여왕의 병사'], id: ['5854', '5841'], capture: false }),
      suppressSeconds: 1,
      response: Responses.aoe(),
    },
    {
      // Optimal Offensive, sword variant (5819).
      id: 'DelubrumSav Guard Optimal Offensive Sword',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Knight', id: '5819', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Ritter Der Königin', id: '5819', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Chevalier De La Reine', id: '5819', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ナイト', id: '5819', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王骑士', id: '5819', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 기사', id: '5819', capture: false }),
      durationSeconds: 5,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Take Outside Bombs',
          de: 'Nimm die äußeren Bomben',
          fr: 'Prenez les bombes extérieur',
          ja: '外の爆弾を取る',
          cn: '吃外面的炸弹',
          ko: '바깥쪽의 폭탄 사용하기',
        },
      },
    },
    {
      // Optimal Offensive, shield variant (581A): knockback, stay clear of the sphere.
      id: 'DelubrumSav Guard Optimal Offensive Shield',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Knight', id: '581A', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Ritter Der Königin', id: '581A', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Chevalier De La Reine', id: '581A', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ナイト', id: '581A', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王骑士', id: '581A', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 기사', id: '581A', capture: false }),
      durationSeconds: 5,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Knockback Away From Sphere',
          de: 'Rückstoß weg von der Sphere',
          fr: 'Poussée loin de la sphère',
          ja: 'ノックバック、玉から離れる',
          cn: '击退,远离球',
          ko: '구슬 쪽에서 넉백',
        },
      },
    },
    {
      // Optimal Play, sword variant (5816): boss point-blank AoE, go out.
      id: 'DelubrumSav Guard Optimal Play Sword',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Knight', id: '5816', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Ritter Der Königin', id: '5816', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Chevalier De La Reine', id: '5816', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ナイト', id: '5816', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王骑士', id: '5816', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 기사', id: '5816', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Out, Avoid Cleaves',
          de: 'Raus, weiche den Cleaves aus',
          fr: 'À l\'extérieur, évitez les cleaves',
          ja: '外へ、範囲攻撃注意',
          cn: '远离,躲避顺劈',
          ko: '가시 피하면서 밖으로',
        },
      },
    },
    {
      // Optimal Play, shield variant (5817): donut AoE, go in.
      id: 'DelubrumSav Guard Optimal Play Shield',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Knight', id: '5817', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Ritter Der Königin', id: '5817', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Chevalier De La Reine', id: '5817', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ナイト', id: '5817', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王骑士', id: '5817', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 기사', id: '5817', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'In, Avoid Cleaves',
          de: 'Rein, weiche den Cleaves aus',
          fr: 'À l\'intérieur, évitez les cleaves',
          ja: '中へ、範囲攻撃注意',
          cn: '靠近,躲避顺劈',
          ko: '가시 피하면서 안으로',
        },
      },
    },
    {
      // Fires once (capture: false) when the 0088 tether appears between the bosses.
      id: 'DelubrumSav Guard Yellow Tether',
      type: 'Tether',
      netRegex: NetRegexes.tether({ source: 'Queen\'s Warrior', target: 'Queen\'s Knight', id: '0088', capture: false }),
      netRegexDe: NetRegexes.tether({ source: 'Kriegerin Der Königin', target: 'Ritter Der Königin', id: '0088', capture: false }),
      netRegexFr: NetRegexes.tether({ source: 'Guerrière De La Reine', target: 'Chevalier De La Reine', id: '0088', capture: false }),
      netRegexJa: NetRegexes.tether({ source: 'クイーンズ・ウォリアー', target: 'クイーンズ・ナイト', id: '0088', capture: false }),
      netRegexCn: NetRegexes.tether({ source: '女王战士', target: '女王骑士', id: '0088', capture: false }),
      netRegexKo: NetRegexes.tether({ source: '여왕의 전사', target: '여왕의 기사', id: '0088', capture: false }),
      // Yellow tether between Knight and Warrior gives them a Physical Vulnerability Down debuff.
      infoText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Remove yellow; apply purple',
          de: 'Entferne Gelb; nimm Lila',
          fr: 'Retirez le jaune; appliquez le violet',
          ja: 'スチールを切り、スペルを使用',
          cn: '点掉钢刺,使用铸魔',
          ko: '강철화살은 지우고, 마법연성 사용',
        },
      },
    },
    {
      // Fires once (capture: false) when the 0089 tether appears between the bosses.
      id: 'DelubrumSav Guard Purple Tether',
      type: 'Tether',
      netRegex: NetRegexes.tether({ source: 'Queen\'s Warrior', target: 'Queen\'s Knight', id: '0089', capture: false }),
      netRegexDe: NetRegexes.tether({ source: 'Kriegerin Der Königin', target: 'Ritter Der Königin', id: '0089', capture: false }),
      netRegexFr: NetRegexes.tether({ source: 'Guerrière De La Reine', target: 'Chevalier De La Reine', id: '0089', capture: false }),
      netRegexJa: NetRegexes.tether({ source: 'クイーンズ・ウォリアー', target: 'クイーンズ・ナイト', id: '0089', capture: false }),
      netRegexCn: NetRegexes.tether({ source: '女王战士', target: '女王骑士', id: '0089', capture: false }),
      netRegexKo: NetRegexes.tether({ source: '여왕의 전사', target: '여왕의 기사', id: '0089', capture: false }),
      // Purple tether between Knight and Warrior gives them a Magic Vulnerability Down debuff.
      // (The previous comment here was copy-pasted from the yellow-tether trigger.)
      infoText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Remove purple; apply yellow',
          de: 'Entferne Lila; nimm Gelb',
          fr: 'Retirez le violet; appliquez le jaune',
          ja: 'スペルを切り、スチールを使用',
          cn: '点掉铸魔,使用钢刺',
          ko: '마법연성은 지우고, 강철화살 사용',
        },
      },
    },
{
id: 'DelubrumSav Guard Boost',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Warrior', id: '582D', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Kriegerin Der Königin', id: '582D', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Guerrière De La Reine', id: '582D', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ウォリアー', id: '582D', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '女王战士', id: '582D', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '여왕의 전사', id: '582D', capture: false }),
infoText: (_data, _matches, output) => output.text!(),
outputStrings: {
text: {
en: 'Dispel Warrior Boost',
de: 'Reinige Kriegerin Buff',
fr: 'Dissipez le boost du Guerrier',
ja: 'ウォリアーにディスペル',
cn: '驱魔 > 战士',
ko: '여왕의 전사 디스펠',
},
},
},
{
id: 'DelubrumSav Guard Higher Power',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Gunner', id: '5853', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Schütze Der Königin', id: '5853', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Fusilier De La Reine', id: '5853', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ガンナー', id: '5853', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '女王枪手', id: '5853', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '여왕의 총사', id: '5853', capture: false }),
infoText: (_data, _matches, output) => output.text!(),
outputStrings: {
text: {
en: 'Dispel Gun Turrets',
de: 'Reinige Schützetürme',
fr: 'Dissipez la Tourelle dirigée',
ja: 'ガンナータレットにディスペル',
cn: '驱魔 > 炮台',
ko: '총포탑 디스펠',
},
},
},
    {
      // Collector only (no output): remembers that the upcoming Reversal Of
      // Forces happens during a Bombslinger cast.
      id: 'DelubrumSav Guard/Queen Bombslinger',
      type: 'StartsUsing',
      // 5AFE = Bombslinger during Queen's Guard, 5B3F = Bombslinger during The Queen
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Warrior', id: ['5AFE', '5B3F'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Kriegerin Der Königin', id: ['5AFE', '5B3F'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Guerrière De La Reine', id: ['5AFE', '5B3F'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ウォリアー', id: ['5AFE', '5B3F'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王战士', id: ['5AFE', '5B3F'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 전사', id: ['5AFE', '5B3F'], capture: false }),
      run: (data) => data.tetherIsBombslinger = true,
    },
    {
      // Collector only: a 0010 tether attached to the Warrior — records that the
      // bomb tether exists this mechanic (read by Reversal Of Forces below).
      id: 'DelubrumSav Guard/Queen Bomb Reversal',
      type: 'Tether',
      netRegex: NetRegexes.tether({ target: 'Queen\'s Warrior', id: '0010', capture: false }),
      netRegexDe: NetRegexes.tether({ target: 'Kriegerin Der Königin', id: '0010', capture: false }),
      netRegexFr: NetRegexes.tether({ target: 'Guerrière De La Reine', id: '0010', capture: false }),
      netRegexJa: NetRegexes.tether({ target: 'クイーンズ・ウォリアー', id: '0010', capture: false }),
      netRegexCn: NetRegexes.tether({ target: '女王战士', id: '0010', capture: false }),
      netRegexKo: NetRegexes.tether({ target: '여왕의 전사', id: '0010', capture: false }),
      suppressSeconds: 1,
      run: (data) => data.tetherOnBomb = true,
    },
    {
      // Collector only: a 0087 tether from you to the Warrior — records that
      // you personally are tethered (read by Reversal Of Forces below).
      id: 'DelubrumSav Guard/Queen Personal Reversal',
      type: 'Tether',
      netRegex: NetRegexes.tether({ target: 'Queen\'s Warrior', id: '0087' }),
      netRegexDe: NetRegexes.tether({ target: 'Kriegerin Der Königin', id: '0087' }),
      netRegexFr: NetRegexes.tether({ target: 'Guerrière De La Reine', id: '0087' }),
      netRegexJa: NetRegexes.tether({ target: 'クイーンズ・ウォリアー', id: '0087' }),
      netRegexCn: NetRegexes.tether({ target: '女王战士', id: '0087' }),
      netRegexKo: NetRegexes.tether({ target: '여왕의 전사', id: '0087' }),
      condition: (data, matches) => matches.source === data.me,
      run: (data) => data.tetherOnSelf = true,
    },
    {
      id: 'DelubrumSav Guard/Queen Reversal Of Forces',
      type: 'StartsUsing',
      // Tethers to self (and bombs, if bombslinger) come out just before this starts casting.
      // This is used in two places, both for Bombslinger and the Winds of Weight.
      // 5829 = Reversal Of Forces during Queen's Guard, 5A0E = Reversal Of Forces during The Queen
      // TODO: should we differentiate big/small/wind/lightning with alert vs info?
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Warrior', id: ['5829', '5A0E'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Kriegerin Der Königin', id: ['5829', '5A0E'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Guerrière De La Reine', id: ['5829', '5A0E'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ウォリアー', id: ['5829', '5A0E'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王战士', id: ['5829', '5A0E'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 전사', id: ['5829', '5A0E'], capture: false }),
      durationSeconds: 11,
      alertText: (data, _matches, output) => {
        // Bombslinger variant: the combination of (tether on a bomb) and
        // (tether on self) picks which bomb size this player should take.
        // Otherwise it is the Winds of Weight variant: tethered players take
        // the wind, untethered players the lightning.
        if (data.tetherIsBombslinger) {
          if (data.tetherOnBomb)
            return data.tetherOnSelf ? output.bigWithTether!() : output.smallNoTether!();
          return data.tetherOnSelf ? output.smallWithTether!() : output.bigNoTether!();
        }
        return data.tetherOnSelf ? output.windTether!() : output.lightningNoTether!();
      },
      run: (data) => {
        // Reset the tether collectors for the next Reversal Of Forces.
        delete data.tetherIsBombslinger;
        delete data.tetherOnSelf;
        delete data.tetherOnBomb;
      },
      outputStrings: {
        windTether: {
          en: 'Wind (tethered)',
          de: 'Wind (Verbindung)',
          fr: 'Vent (lié)',
          ja: '風 (線)',
          cn: '风 (连线)',
          ko: '녹색 회오리 (선 연결)',
        },
        lightningNoTether: {
          en: 'Lightning (no tether)',
          de: 'Blitz (keine Verbindung)',
          fr: 'Lumière (non liée)',
          ja: '雷 (線なし)',
          cn: '雷 (无连线)',
          ko: '보라색 장판 (선 없음)',
        },
        bigNoTether: {
          en: 'Big Bomb (no tether)',
          de: 'Große Bombe (keine Verbindung)',
          fr: 'Grosse bombe (non liée)',
          ja: '大きい爆弾 (線なし)',
          cn: '大炸弹 (无连线)',
          ko: '큰 폭탄 (선 없음)',
        },
        bigWithTether: {
          en: 'Big Bomb (tethered)',
          de: 'Große Bombe (Verbindung)',
          fr: 'Grosse bombe (liée)',
          ja: '大きい爆弾 (線)',
          cn: '大炸弹 (连线)',
          ko: '큰 폭탄 (선 연결)',
        },
        smallNoTether: {
          en: 'Small Bomb (no tether)',
          de: 'Kleine Bombe (keine Verbindung)',
          fr: 'Petite bombe (non liée)',
          ja: '小さい爆弾 (線なし)',
          cn: '小炸弹 (无连线)',
          ko: '작은 폭탄 (선 없음)',
        },
        smallWithTether: {
          en: 'Small Bomb (tethered)',
          de: 'Kleine Bombe (Verbindung)',
          fr: 'Petite bombe (liée)',
          ja: '小さい爆弾 (線)',
          cn: '小炸弹 (连线)',
          ko: '작은 폭탄 (선 연결)',
        },
      },
    },
    {
      // 583F Fiery Portent: stop all movement/actions for the last ~3s of the
      // cast, hence the delay until (castTime - 5) with a 5.5s display window.
      id: 'DelubrumSav Guard Fiery Portent',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Soldier', id: '583F' }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Soldat Der Königin', id: '583F' }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Soldat De La Reine', id: '583F' }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ソルジャー', id: '583F' }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王士兵', id: '583F' }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 병사', id: '583F' }),
      delaySeconds: (_data, matches) => parseFloat(matches.castTime) - 5,
      durationSeconds: 5.5,
      response: Responses.stopEverything(),
    },
    {
      // 5840 Icy Portent: the inverse of Fiery Portent — keep moving.
      id: 'DelubrumSav Guard Icy Portent',
      type: 'StartsUsing',
      // Assuming you need to move for 3 seconds (duration of Pyretic from Fiery Portent)
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Soldier', id: '5840' }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Soldat Der Königin', id: '5840' }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Soldat De La Reine', id: '5840' }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ソルジャー', id: '5840' }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王士兵', id: '5840' }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 병사', id: '5840' }),
      delaySeconds: (_data, matches) => parseFloat(matches.castTime) - 5,
      durationSeconds: 5.5,
      response: Responses.moveAround('alert'),
    },
    {
      // Delayed warning to move away, 9.5s after Above Board starts.
      id: 'DelubrumSav Guard Above Board Warning',
      type: 'StartsUsing',
      // 5826 in Guard fight, 5A0B in Queen fight.
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Warrior', id: ['5826', '5A0B'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Kriegerin Der Königin', id: ['5826', '5A0B'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Guerrière De La Reine', id: ['5826', '5A0B'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ウォリアー', id: ['5826', '5A0B'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王战士', id: ['5826', '5A0B'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 전사', id: ['5826', '5A0B'], capture: false }),
      delaySeconds: 9.5,
      response: Responses.moveAway(),
    },
    {
      // 584C Queen's Shot (Guard fight): orient your circle's opening at the gunner.
      id: 'DelubrumSav Guard Queen\'s Shot',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Gunner', id: '584C', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Schütze Der Königin', id: '584C', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fusilier De La Reine', id: '584C', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ガンナー', id: '584C', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王枪手', id: '584C', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 총사', id: '584C', capture: false }),
      // This has a 7 second cast time.
      delaySeconds: 3.5,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          // Hard to say "point the opening in the circle around you at the gunner" succinctly.
          en: 'Point at the Gunner',
          de: 'Auf den Schützen zeigen',
          fr: 'Pointez sur le Fusiller',
          ja: '切り目をガンナーに向く',
          cn: '将缺口对准枪手',
          ko: '총사쪽으로 위치 맞추기',
        },
      },
    },
    {
      // 5A2D Queen's Shot (Queen fight): same mechanic, but the gunner's
      // position is fixed, so the callout includes it.
      id: 'DelubrumSav Queen Queen\'s Shot',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Gunner', id: '5A2D', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Schütze Der Königin', id: '5A2D', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fusilier De La Reine', id: '5A2D', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ガンナー', id: '5A2D', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王枪手', id: '5A2D', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 총사', id: '5A2D', capture: false }),
      // This has a 7 second cast time.
      delaySeconds: 3.5,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          // This gunner is always in the northwest during Queen, vs in Guard where it is tankable.
          en: 'Point at the Gunner (in northwest)',
          de: 'Auf den Schützen zeigen (im Nord-Westen)',
          fr: 'Pointez sur le Fusiller (au nord-ouest)',
          ja: '切り目を (北西) ガンナーに向く',
          cn: '将缺口对准西北(左上)枪手',
          ko: '(북서쪽에 있는) 총사쪽으로 위치 맞추기',
        },
      },
    },
    {
      // After either Queen's Shot resolves, re-orient the opening at the turret.
      id: 'DelubrumSav Guard Queen\'s Shot Followup',
      type: 'Ability',
      netRegex: NetRegexes.ability({ source: 'Queen\'s Gunner', id: ['584C', '5A2D'], capture: false }),
      netRegexDe: NetRegexes.ability({ source: 'Schütze Der Königin', id: ['584C', '5A2D'], capture: false }),
      netRegexFr: NetRegexes.ability({ source: 'Fusilier De La Reine', id: ['584C', '5A2D'], capture: false }),
      netRegexJa: NetRegexes.ability({ source: 'クイーンズ・ガンナー', id: ['584C', '5A2D'], capture: false }),
      netRegexCn: NetRegexes.ability({ source: '女王枪手', id: ['584C', '5A2D'], capture: false }),
      netRegexKo: NetRegexes.ability({ source: '여왕의 총사', id: ['584C', '5A2D'], capture: false }),
      suppressSeconds: 1,
      infoText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Point at the Turret',
          de: 'Auf den Geschützturm zeigen',
          fr: 'Pointez sur la Tourelle',
          ja: '切り目をタレットに向く',
          cn: '将缺口对准炮台',
          ko: '포탑쪽으로 위치 맞추기',
        },
      },
    },
    {
      // 5820 Coat of Arms (Aetherial Ward): warn to stop attacking 2.5s before
      // the cast resolves, suppressed so multiple wards give one callout.
      id: 'DelubrumSav Guard Coat of Arms',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Aetherial Ward', id: '5820' }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Barriere', id: '5820' }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Barrière Magique', id: '5820' }),
      netRegexJa: NetRegexes.startsUsing({ source: '魔法障壁', id: '5820' }),
      netRegexCn: NetRegexes.startsUsing({ source: '魔法障壁', id: '5820' }),
      netRegexKo: NetRegexes.startsUsing({ source: '마법 장벽', id: '5820' }),
      delaySeconds: (_data, matches) => parseFloat(matches.castTime) - 2.5,
      suppressSeconds: 1,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Stop attacking',
          de: 'Angriffe stoppen',
          fr: 'Arrêtez d\'attaquer',
          ja: '攻撃禁止',
          cn: '停止攻击',
          ko: '공격 중지',
        },
      },
    },
    {
      // 57BD Malediction Of Agony: raidwide.
      id: 'DelubrumSav Phantom Malediction Of Agony',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Bozjan Phantom', id: '57BD', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Bozja-Phantom', id: '57BD', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fantôme Bozjien', id: '57BD', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ボズヤ・ファントム', id: '57BD', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '博兹雅幻灵', id: '57BD', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '보즈야 유령', id: '57BD', capture: false }),
      response: Responses.aoe(),
    },
    {
      // 57B2 Weave Miasma: positioning differs between the first and second
      // cast; later casts (if any) intentionally produce no callout.
      id: 'DelubrumSav Phantom Weave Miasma',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Bozjan Phantom', id: '57B2', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Bozja-Phantom', id: '57B2', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fantôme Bozjien', id: '57B2', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ボズヤ・ファントム', id: '57B2', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '博兹雅幻灵', id: '57B2', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '보즈야 유령', id: '57B2', capture: false }),
      infoText: (data, _matches, output) => {
        // Count casts across the fight to pick the right callout.
        data.weaveCount = (data.weaveCount || 0) + 1;
        if (data.weaveCount === 1)
          return output.firstWeave!();
        else if (data.weaveCount === 2)
          return output.secondWeave!();
      },
      outputStrings: {
        firstWeave: {
          en: 'Go North (donut bottom/circle top)',
          de: 'Geh nach Norden (Donut unten/Kreise oben)',
          fr: 'Allez au nord (donut bas/cercle haut)',
          ja: '北へ (下にドーナツ/上に円)',
          cn: '去下环上圆列北侧',
          ko: '북쪽으로 (도넛이 아래, 원이 위인 곳)',
        },
        secondWeave: {
          en: 'Stay South (square bottom/circle top)',
          de: 'Geh nach Süden (Viereck unten/Kreise oben)',
          fr: 'Restez au sud (fond carré/cercle haut)',
          ja: '南へ(下に四角/上に円)',
          cn: '待在下方上圆列南侧',
          ko: '남쪽으로 (사각형이 아래, 원이 위인 곳)',
        },
      },
    },
    {
      id: 'DelubrumSav Phantom Stuffy Wrath',
      type: 'AddedCombatant',
      // Spawns after 57BA Summon, either North (-403.5) or South (-344.5)
      // Casts 57C2 Undying Hatred
      netRegex: NetRegexes.addedCombatantFull({ npcNameId: '9756' }),
      durationSeconds: 5,
      suppressSeconds: 1,
      response: (_data, matches, output) => {
        // cactbot-builtin-response
        output.responseOutputStrings = {
          goSouth: {
            en: 'Go South; Knockback to Glowing Donut',
            de: 'Geh nach Süden; Rückstoß zum leuchtenden Donut',
            fr: 'Allez au sud; Poussée du donut brillant',
            ja: '南へ、光ってるドーナツへノックバック',
            cn: '去发光环形列南侧',
            ko: '남쪽으로, 빛나는 도넛쪽으로 넉백',
          },
          goNorth: {
            en: 'Go North; Knockback from Glowing Circle',
            de: 'Geh nach Norden; Rückstoß zum leuchtenden Kreis',
            fr: 'Allez au nord; Poussée du cercle brillant',
            ja: '北へ、光ってる円からノックバック',
            cn: '去发光圆形列北侧',
            ko: '북쪽으로, 빛나는 원에서 넉백',
          },
        };
        // The sum of the two possible spawn locations divided by two.
        // y < -374 means the add spawned at the north position.
        if (parseFloat(matches.y) < -374)
          return { alertText: output.goNorth!() };
        return { alertText: output.goSouth!() };
      },
    },
    {
      // 57BF Vile Wave: frontal attack, get behind.
      id: 'DelubrumSav Phantom Vile Wave',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Bozjan Phantom', id: '57BF', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Bozja-Phantom', id: '57BF', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fantôme Bozjien', id: '57BF', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ボズヤ・ファントム', id: '57BF', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '博兹雅幻灵', id: '57BF', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '보즈야 유령', id: '57BF', capture: false }),
      response: Responses.getBehind(),
    },
    {
      id: 'DelubrumSav Phantom Ice Spikes',
      type: 'StartsUsing',
      // Ice Spikes (effectId: '9E0') reflects damage, wait for Dispel
      // Buff expires about 16 seconds on first cast, ~8 seconds later casts)
      netRegex: NetRegexes.startsUsing({ source: 'Bozjan Phantom', id: '57BC', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Bozja-Phantom', id: '57BC', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fantôme Bozjien', id: '57BC', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ボズヤ・ファントム', id: '57BC', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '博兹雅幻灵', id: '57BC', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '보즈야 유령', id: '57BC', capture: false }),
      // Slight delay so the callout lands when the buff is actually up.
      delaySeconds: 3,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Stop Attacking, Dispel Ice Spikes',
          de: 'Angriffe stoppen, entferne Eisstachel',
          fr: 'Arrêtez d\'attaquer, dissipez les pics de glace',
          ja: '攻撃停止、ファントムにディスペル',
          cn: '停手,驱魔 > 幻灵',
          ko: '공격 중지, 보스 디스펠',
        },
      },
    },
    {
      // 57BE Excruciation: tank buster; only fires when it targets a party member.
      id: 'DelubrumSav Phantom Excruciation',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Bozjan Phantom', id: '57BE' }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Bozja-Phantom', id: '57BE' }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Fantôme Bozjien', id: '57BE' }),
      netRegexJa: NetRegexes.startsUsing({ source: 'ボズヤ・ファントム', id: '57BE' }),
      netRegexCn: NetRegexes.startsUsing({ source: '博兹雅幻灵', id: '57BE' }),
      netRegexKo: NetRegexes.startsUsing({ source: '보즈야 유령', id: '57BE' }),
      condition: tankBusterOnParty,
      response: Responses.tankBuster(),
    },
    {
      // 594E Wrath Of Bozja: shared buster on the tank; tanks/healers share it,
      // everyone else avoids the cleave.
      id: 'DelubrumSav Avowed Wrath Of Bozja',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: '594E', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: '594E', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: '594E', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: '594E', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: '594E', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: '594E', capture: false }),
      response: (data, _matches, output) => {
        // cactbot-builtin-response
        output.responseOutputStrings = {
          avoidTankCleave: Outputs.avoidTankCleave,
          sharedTankBuster: {
            en: 'Shared Tank Buster',
            de: 'Geteilter Tank Buster',
            fr: 'Partagez le Tank buster',
            ja: '頭割りタンクバスター',
            cn: '分摊死刑',
            ko: '쉐어 탱버',
          },
        };
        if (data.role === 'tank' || data.role === 'healer')
          return { alertText: output.sharedTankBuster!() };
        return { infoText: output.avoidTankCleave!() };
      },
    },
    {
      id: 'DelubrumSav Avowed Fury Of Bozja',
      type: 'StartsUsing',
      // Allegiant Arsenal 5987 = staff (out), followed up with Fury of Bozja 594C
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: '5987', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: '5987', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: '5987', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: '5987', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: '5987', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: '5987', capture: false }),
      response: Responses.getOut(),
      // Phase flag is read elsewhere to disambiguate shared mechanics.
      run: (data) => data.avowedPhase = 'staff',
    },
    {
      id: 'DelubrumSav Avowed Flashvane',
      type: 'StartsUsing',
      // Allegiant Arsenal 5986 = bow (get behind), followed up by Flashvane 594B
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: '5986', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: '5986', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: '5986', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: '5986', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: '5986', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: '5986', capture: false }),
      response: Responses.getBehind(),
      run: (data) => data.avowedPhase = 'bow',
    },
    {
      id: 'DelubrumSav Avowed Infernal Slash',
      type: 'StartsUsing',
      // Allegiant Arsenal 5985 = sword (get front), followed up by Infernal Slash 594A
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: '5985', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: '5985', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: '5985', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: '5985', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: '5985', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: '5985', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      run: (data) => data.avowedPhase = 'sword',
      outputStrings: {
        text: {
          en: 'Get In Front',
          de: 'Geh vor den Boss',
          fr: 'Soyez devant',
          ja: 'ボスの正面へ',
          cn: '去Boss正面',
          ko: '보스 정면에 서기',
        },
      },
    },
    {
      id: 'DelubrumSav Avowed Hot And Cold Cleanup',
      type: 'StartsUsing',
      // On Hot and Cold casts. This will clean up any lingering forced march from bow phase 1.
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: ['5BB0', '5BAF', '597B'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: ['5BB0', '5BAF', '597B'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: ['5BB0', '5BAF', '597B'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: ['5BB0', '5BAF', '597B'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: ['5BB0', '5BAF', '597B'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: ['5BB0', '5BAF', '597B'], capture: false }),
      run: (data) => {
        // Reset all Hot-and-Cold state collected by the triggers below.
        delete data.currentTemperature;
        delete data.currentBrand;
        delete data.forcedMarch;
        delete data.blades;
      },
    },
    {
      // Collector: record your current temperature debuff as a signed number.
      id: 'DelubrumSav Avowed Temperature Collect',
      type: 'GainsEffect',
      // These come from Environment, Trinity Avowed, Avowed Avatar, Swirling Orb
      // 89C Normal
      // 89D Running Hot: +1
      // 8DC Running Cold: -1
      // 8E2 Running Cold: -2
      // 8A4 Running Hot: +2
      netRegex: NetRegexes.gainsEffect({ effectId: ['89C', '89D', '8DC', '8E2', '8A4'] }),
      condition: Conditions.targetIsYou(),
      run: (data, matches) => {
        const temperature: { [id: string]: number } = {
          '89C': 0,
          '89D': 1,
          '8DC': -1,
          '8E2': -2,
          '8A4': 2,
        };
        data.currentTemperature = temperature[matches.effectId.toUpperCase()];
      },
    },
    {
      // Collector: record your current brand debuff as a signed number
      // (hot positive, cold negative), added to temperature later.
      id: 'DelubrumSav Avowed Brand Collect',
      type: 'GainsEffect',
      // These come from Environment, E0000000
      // 8E5 Hot Brand: +1
      // 8F3 Hot Brand: +2
      // 8F4 Cold Brand: +1
      // 8F8 Cold Brand: +2
      netRegex: NetRegexes.gainsEffect({ effectId: ['8E5', '8F3', '8F4', '8F8'] }),
      condition: Conditions.targetIsYou(),
      run: (data, matches) => {
        const brand: { [id: string]: number } = {
          '8E5': 1,
          '8F3': 2,
          '8F4': -1,
          '8F8': -2,
        };
        data.currentBrand = brand[matches.effectId.toUpperCase()];
      },
    },
    {
      // Collector: remember which forced-march effect you have (by effectId).
      id: 'DelubrumSav Avowed March Collect',
      type: 'GainsEffect',
      // 50D Forward March
      // 50E About Face
      // 50F Left Face
      // 510 Right Face
      netRegex: NetRegexes.gainsEffect({ effectId: ['50D', '50E', '50F', '510'] }),
      condition: Conditions.targetIsYou(),
      run: (data, matches) => data.forcedMarch = matches.effectId.toUpperCase(),
    },
    {
      id: 'DelubrumSav Avowed Blade of Entropy Collect',
      type: 'StartsUsing',
      // Used to get whether left or right cleave is happening and temperature value
      // Trinity Avowed or Avowed Avatar cast these pairs
      // +1 Cleaves
      // 5942 = right cleave, heat (1) paired with 5944
      // 5943 = right cleave, cold (1) paired with 5945
      // 5944 = right cleave, heat (1) paired with 5942
      // 5945 = right cleave, cold (1) paired with 5943
      //
      // 5946 = left cleave, cold (1) paired with 5948
      // 5947 = left cleave, heat (1) paired with 5949
      // 5948 = left cleave, cold (1) paired with 5946
      // 5949 = left cleave, heat (1) paired with 5947
      //
      // +2 Cleaves
      // 5956 = right cleave, heat (2) paired with 5958
      // 5957 = right cleave, cold (2) paired with 5959
      // 5958 = right cleave, heat (2) paired with 5956
      // 5959 = right cleave, cold (2) paired with 5957
      //
      // 595A = left cleave heat (2) paired with 595C
      // 595B = left cleave cold (2) paired with 595D
      // 595C = left cleave heat (2) paired with 595A
      // 595D = left cleave cold (2) paired with 595B
      netRegex: NetRegexes.startsUsing({ source: ['Trinity Avowed', 'Avowed Avatar'], id: ['5942', '5943', '5946', '5947', '5956', '5957', '595A', '595B'] }),
      netRegexDe: NetRegexes.startsUsing({ source: ['Trinität Der Eingeschworenen', 'Spaltteil der Eingeschworenen'], id: ['5942', '5943', '5946', '5947', '5956', '5957', '595A', '595B'] }),
      netRegexFr: NetRegexes.startsUsing({ source: ['Trinité Féale', 'Clone De La Trinité Féale'], id: ['5942', '5943', '5946', '5947', '5956', '5957', '595A', '595B'] }),
      netRegexJa: NetRegexes.startsUsing({ source: ['トリニティ・アヴァウド', 'アヴァウドの分体'], id: ['5942', '5943', '5946', '5947', '5956', '5957', '595A', '595B'] }),
      netRegexCn: NetRegexes.startsUsing({ source: ['誓约之三位一体', '誓约之分身'], id: ['5942', '5943', '5946', '5947', '5956', '5957', '595A', '595B'] }),
      netRegexKo: NetRegexes.startsUsing({ source: ['맹세의 삼위일체', '맹세의 분열체'], id: ['5942', '5943', '5946', '5947', '5956', '5957', '595A', '595B'] }),
      run: (data, matches) => {
        // Map caster id (numeric) -> blade ability id, for later lookup.
        data.blades ??= {};
        data.blades[parseInt(matches.sourceId, 16)] = matches.id.toUpperCase();
      },
    },
    {
      // 597F Shimmering Shot: follow the arrow lane whose temperature cancels
      // out your own (temperature + brand), accounting for any forced march.
      id: 'DelubrumSav Avowed Hot And Cold Shimmering Shot',
      type: 'StartsUsing',
      netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: '597F', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: '597F', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: '597F', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: '597F', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: '597F', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: '597F', capture: false }),
      durationSeconds: 5,
      alertText: (data, _matches, output) => {
        // Treat missing debuffs as neutral (0).
        const currentBrand = data.currentBrand ?? 0;
        const currentTemperature = data.currentTemperature ?? 0;
        const effectiveTemperature = (currentTemperature + currentBrand).toString();
        // Deliberately inverted: your temperature maps to the opposite-signed
        // arrow so the two sum to zero; neutral players take the empty spot.
        const tempToOutput: { [temp: string]: string } = {
          '-2': output.plusTwo!(),
          '-1': output.plusOne!(),
          '0': output.emptySpot!(),
          '1': output.minusOne!(),
          '2': output.minusTwo!(),
        };
        const arrowStr = effectiveTemperature in tempToOutput
          ? tempToOutput[effectiveTemperature]
          : output.unknownTemperature!();
        // Effect ids collected by March Collect above.
        const marchStrMap: { [id: string]: string } = {
          '50D': output.forwards!(),
          '50E': output.backwards!(),
          '50F': output.left!(),
          '510': output.right!(),
        };
        if (data.forcedMarch) {
          const marchStr = marchStrMap[data.forcedMarch];
          return output.marchToArrow!({ arrow: arrowStr, dir: marchStr });
        }
        return output.followArrow!({ arrow: arrowStr });
      },
      outputStrings: {
        plusTwo: {
          en: '+2 Heat Arrow',
          de: '+2 Heiß-Pfeile',
          fr: 'Flèche de chaleur +2',
          ja: '炎属性+2',
          cn: '火+2箭',
          ko: '+2 불 화살',
        },
        plusOne: {
          en: '+1 Heat Arrow',
          de: '+1 Heiß-Pfeile',
          fr: 'Flèche de chaleur +1',
          ja: '炎属性+1',
          cn: '火+1箭',
          ko: '+1 불 화살',
        },
        emptySpot: {
          en: 'Empty Spot',
          de: 'Leeres Feld',
          fr: 'Emplacement vide',
          ja: 'そのままにする',
          cn: '空白',
          ko: '빈 자리',
        },
        minusOne: {
          en: '-1 Cold Arrow',
          de: '-1 Kalt-Pfeile',
          fr: 'Flèche de froid -1',
          ja: '氷属性-1',
          cn: '冰-1箭',
          ko: '-1 얼음 화살',
        },
        minusTwo: {
          en: '-2 Cold Arrow',
          de: '-2 Kalt-Pfeile',
          fr: 'Flèche de froid -2',
          ja: '氷属性-2',
          cn: '冰-2箭',
          ko: '-2 얼음 화살',
        },
        unknownTemperature: {
          en: 'Opposite Arrow',
          de: 'Entgegengesetze Pfeile',
          fr: 'Flèche de l\'élément opposé',
          ja: '体温と逆のあみだ',
          cn: '相反温度的箭',
          ko: '반대속성 화살',
        },
        forwards: {
          en: 'forwards',
          de: 'Vorwärts',
          fr: 'Vers l\'avant',
          ja: '前',
          cn: '前',
          ko: '앞',
        },
        backwards: {
          en: 'backwards',
          de: 'Rückwärts',
          fr: 'Vers l\'arrière',
          ja: '後ろ',
          cn: '后',
          ko: '뒤',
        },
        left: {
          en: 'left',
          de: 'Links',
          fr: 'À gauche',
          ja: '左',
          cn: '左',
          ko: '왼쪽',
        },
        right: {
          en: 'right',
          de: 'Rechts',
          fr: 'À droite',
          ja: '右',
          cn: '右',
          ko: '오른쪽',
        },
        followArrow: {
          en: 'Follow ${arrow}',
          de: 'Folge ${arrow}',
          fr: 'Suivez ${arrow}',
          ja: '${arrow}に従う',
          cn: '接${arrow}',
          ko: '${arrow}쪽으로',
        },
        marchToArrow: {
          en: 'March ${dir} to ${arrow}',
          de: 'Marchiere ${dir} zum ${arrow}',
          fr: 'Marqueur ${dir} de ${arrow}',
          ja: '強制移動: ${dir} > ${arrow}',
          cn: '强制移动:${dir} > ${arrow}',
          ko: '강제이동 ${dir} > ${arrow}',
        },
      },
    },
{
id: 'DelubrumSav Avowed Hot And Cold Freedom Of Bozja',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Trinity Avowed', id: '597C', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Trinität Der Eingeschworenen', id: '597C', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Trinité Féale', id: '597C', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'トリニティ・アヴァウド', id: '597C', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '誓约之三位一体', id: '597C', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '맹세의 삼위일체', id: '597C', capture: false }),
delaySeconds: 7,
durationSeconds: 5,
alertText: (data, _matches, output) => {
const currentBrand = data.currentBrand ? data.currentBrand : 0;
const currentTemperature = data.currentTemperature ? data.currentTemperature : 0;
const effectiveTemperature = (currentTemperature + currentBrand).toString();
const tempToOutput: { [temp: string]: string } = {
'-2': output.plusTwo!(),
'-1': output.plusOne!(),
'1': output.minusOne!(),
'2': output.minusTwo!(),
};
const meteorStr = effectiveTemperature in tempToOutput
? tempToOutput[effectiveTemperature]
: output.unknownTemperature!();
const marchStrMap: { [id: string]: string } = {
'50D': output.forwards!(),
'50E': output.backwards!(),
'50F': output.left!(),
'510': output.right!(),
};
if (data.forcedMarch) {
const marchStr = marchStrMap[data.forcedMarch];
return output.marchToMeteor!({ meteor: meteorStr, dir: marchStr });
}
return output.goToMeteor!({ meteor: meteorStr });
},
outputStrings: {
plusTwo: {
en: '+2 Heat Meteor',
de: '+2 Heiß-Meteor',
fr: 'Météore de chaleur +2',
ja: '炎属性+2',
cn: '火+2陨石',
ko: '+2 불 운석',
},
plusOne: {
en: '+1 Heat Meteor',
de: '+1 Heiß-Meteor',
fr: 'Météore de chaleur +1',
ja: '炎属性+1',
cn: '火+1陨石',
ko: '+1 불 운석',
},
minusOne: {
en: '-1 Cold Meteor',
de: '-1 Kalt-Meteor',
fr: 'Météore de froid -1',
ja: '氷属性-1',
cn: '冰-1陨石',
ko: '-1 얼음 운석',
},
minusTwo: {
en: '-2 Cold Meteor',
de: '-2 Kalt-Meteor',
fr: 'Météore de froid -2',
ja: '氷属性-2',
cn: '冰-2陨石',
ko: '-2 얼음 운석',
},
unknownTemperature: {
en: 'Opposite Meteor',
de: 'Entgegengesetzer Meteor',
fr: 'Météore de l\'élément opposé',
ja: '体温と逆のメテオを受ける',
cn: '去相反温度的陨石',
ko: '반대속성 운석',
},
forwards: {
en: 'forwards',
de: 'Vorwärts',
fr: 'Vers l\'avant',
ja: '前',
cn: '前',
ko: '앞',
},
backwards: {
en: 'backwards',
de: 'Rückwärts',
fr: 'Vers l\'arrière',
ja: '後ろ',
cn: '后',
ko: '뒤',
},
left: {
en: 'left',
de: 'Links',
fr: 'À gauche',
ja: '左',
cn: '左',
ko: '왼쪽',
},
right: {
en: 'right',
de: 'Rechts',
fr: 'À droite',
ja: '右',
cn: '右',
ko: '오른쪽',
},
goToMeteor: {
en: 'Go to ${meteor} (watch clones)',
de: 'Gehe zum ${meteor} (beachte die Klone)',
fr: 'Allez au ${meteor} (regardez les clones)',
ja: '${meteor}へ (分体を見る)',
cn: '去${meteor},观察分身',
ko: '${meteor}쪽으로 (분신 위치 확인)',
},
marchToMeteor: {
en: 'March ${dir} to ${meteor}',
de: 'Marchiere ${dir} zum ${meteor}',
fr: 'Marqueur ${dir} du ${meteor}',
ja: '強制移動: ${dir} > ${meteor}',
cn: '强制移动:${dir} > ${meteor}',
ko: '강제이동 ${dir} > ${meteor}',
},
},
},
{
id: 'DelubrumSav Avowed Hot And Cold Unwavering Apparations',
type: 'GainsEffect',
// The buffs come out before the spell cast
// Trinity Avowed and/or Avowed Avatar receive one of these buffs:
// 8F9: Hot Blade: +1
// 8FA: Hot Blade: +2
// 8FB: Cold Blade: -1
// 8FC: Cold Blade: -2
// Positional data in statusEffectsParams is often not up to date, use promise
//
// Trigger delayed until after Blade Of Entropy happens about ~100ms after
// to get left/right cleave info
// Ignoring Trinity Avowed due to Environment 'randomly' refreshing its buff
netRegex: NetRegexes.gainsEffect({ target: 'Avowed Avatar', effectId: ['8F9', '8FA', '8FB', '8FC'], capture: false }),
netRegexDe: NetRegexes.gainsEffect({ target: 'Spaltteil der Eingeschworenen', effectId: ['8F9', '8FA', '8FB', '8FC'], capture: false }),
netRegexFr: NetRegexes.gainsEffect({ target: 'Clone De La Trinité Féale', effectId: ['8F9', '8FA', '8FB', '8FC'], capture: false }),
netRegexJa: NetRegexes.gainsEffect({ target: 'アヴァウドの分体', effectId: ['8F9', '8FA', '8FB', '8FC'], capture: false }),
netRegexCn: NetRegexes.gainsEffect({ target: '誓约之分身', effectId: ['8F9', '8FA', '8FB', '8FC'], capture: false }),
netRegexKo: NetRegexes.gainsEffect({ target: '맹세의 분열체', effectId: ['8F9', '8FA', '8FB', '8FC'], capture: false }),
delaySeconds: 0.5,
durationSeconds: 9.5,
suppressSeconds: 1,
promise: async (data, _matches, output) => {
const trinityLocaleNames: LocaleText = {
en: 'Trinity Avowed',
de: 'Trinität Der Eingeschworenen',
fr: 'Trinité féale',
ja: 'トリニティ・アヴァウ',
cn: '誓约之三位一体',
ko: '맹세의 삼위일체',
};
const avatarLocaleNames: LocaleText = {
en: 'Avowed Avatar',
de: 'Spaltteil der Eingeschworenen',
fr: 'Clone de la Trinité féale',
ja: 'アヴァウドの分体',
cn: '誓约之分身',
ko: '맹세의 분열체',
};
// select the Trinity and Avatars
let combatantNameBoss = null;
let combatantNameAvatar = null;
combatantNameBoss = trinityLocaleNames[data.parserLang];
combatantNameAvatar = avatarLocaleNames[data.parserLang];
let combatantDataBoss = null;
let combatantDataAvatars = null;
if (combatantNameBoss) {
combatantDataBoss = await callOverlayHandler({
call: 'getCombatants',
names: [combatantNameBoss],
});
}
if (combatantNameAvatar) {
combatantDataAvatars = await callOverlayHandler({
call: 'getCombatants',
names: [combatantNameAvatar],
});
}
// if we could not retrieve combatant data, the
// trigger will not work, so just resume promise here
if (combatantDataBoss === null) {
console.error(`Trinity Avowed: null data`);
delete data.safeZone;
return;
}
if (!combatantDataBoss.combatants) {
console.error(`Trinity Avowed: null combatants`);
delete data.safeZone;
return;
}
if (combatantDataAvatars === null) {
console.error(`Avowed Avatar: null data`);
delete data.safeZone;
return;
}
if (!combatantDataAvatars.combatants) {
console.error(`Avowed Avatar: null combatants`);
delete data.safeZone;
return;
}
if (combatantDataAvatars.combatants.length < 3) {
console.error(`Avowed Avatar: expected at least 3 combatants got ${combatantDataAvatars.combatants.length}`);
delete data.safeZone;
return;
}
if (!data.blades) {
console.error(`Avowed Avatar: missing blades`);
delete data.safeZone;
return;
}
const getFacing = (combatant: PluginCombatantState) => {
// Snap heading to closest card.
// N = 0, E = 1, S = 2, W = 3
return (2 - Math.round(combatant.Heading * 4 / Math.PI) / 2) % 4;
};
const getUnwaveringPosition = (combatant: PluginCombatantState) => {
// Positions are moved downward 87 and left 277
const y = combatant.PosY + 87;
const x = combatant.PosX + 277;
// N = 0, E = 1, S = 2, W = 3
return Math.round(2 - 2 * Math.atan2(x, y) / Math.PI) % 4;
};
// we need to filter for the Trinity Avowed with the lowest ID
// that one is always cleaving on one of the cardinals
// Trinity Avowed is always East (-267, -87)
const sortCombatants = (a: PluginCombatantState, b: PluginCombatantState) => (a.ID ?? 0) - (b.ID ?? 0);
const eastCombatant = combatantDataBoss.combatants.sort(sortCombatants).shift();
// we need to filter for the three Avowed Avatars with the lowest IDs
// as they cast cleave at the different cardinals
const [avatarOne, avatarTwo, avatarThree] = combatantDataAvatars.combatants.sort(sortCombatants);
if (!avatarOne || !avatarTwo || !avatarThree)
throw new UnreachableCode();
const combatantPositions: PluginCombatantState[] = [];
combatantPositions[getUnwaveringPosition(avatarOne)] = avatarOne;
combatantPositions[getUnwaveringPosition(avatarTwo)] = avatarTwo;
combatantPositions[getUnwaveringPosition(avatarThree)] = avatarThree;
// Avowed Avatars can spawn in the other positions
// Determine the location of Avowed Avatars
// North Avowed Avatar (-277, -97)
// South Avowed Avatar (-277, -77)
// West Avowed Avatar (-277, -87)
const [northCombatant, , southCombatant, westCombatant] = combatantPositions;
if (!northCombatant || !southCombatant || !westCombatant)
throw new UnreachableCode();
// Get facings
const northCombatantFacing = getFacing(northCombatant);
const southCombatantFacing = getFacing(southCombatant);
// Get Blade of Entropy data
const eastCombatantBlade = data.blades[eastCombatant?.ID ?? 0];
const northCombatantBlade = data.blades[northCombatant?.ID ?? 0];
const westCombatantBlade = data.blades[westCombatant?.ID ?? 0];
const southCombatantBlade = data.blades[southCombatant?.ID ?? 0];
if (
eastCombatantBlade === undefined || northCombatantBlade === undefined ||
westCombatantBlade === undefined || southCombatantBlade === undefined
)
throw new UnreachableCode();
const bladeValues: { [id: string]: number } = {
'5942': 1,
'5943': -1,
'5946': 1,
'5947': -1,
'5956': 2,
'5957': -2,
'595A': 2,
'595B': -2,
};
// 1 = Right
// 0 = Left
const bladeSides: { [id: string]: number } = {
'5942': 1,
'5943': 1,
'5946': 0,
'5947': 0,
'5956': 1,
'5957': 1,
'595A': 0,
'595B': 0,
};
const eastCombatantBladeValue = bladeValues[eastCombatantBlade];
const northCombatantBladeValue = bladeValues[northCombatantBlade];
const westCombatantBladeValue = bladeValues[westCombatantBlade];
const southCombatantBladeValue = bladeValues[southCombatantBlade];
if (
eastCombatantBladeValue === undefined || northCombatantBladeValue === undefined ||
westCombatantBladeValue === undefined || southCombatantBladeValue === undefined
)
throw new UnreachableCode();
// Create map to improve readability of safeZone conditions
const dirNum = { north: 0, east: 1, south: 2, west: 3 };
// Only need to check cleaves from two parallel clones to determine safe spots
// because if the clone is cleaving inside, then we know where other clones
// are cleaving in order to make a '+' where the ends are each cleaved by one
// clone and the middle square is safe
let safeZone = null;
let adjacentZones: { [dir: number]: number } = {};
if (
(northCombatantFacing === dirNum.north && bladeSides[northCombatantBlade]) ||
(northCombatantFacing === dirNum.south && !bladeSides[northCombatantBlade])
) {
// North clone cleaving inside east (and therefore east clone cleaving north).
safeZone = output.southwest!();
adjacentZones = {
[dirNum.north]: eastCombatantBladeValue,
[dirNum.east]: northCombatantBladeValue,
[dirNum.south]: southCombatantBladeValue,
[dirNum.west]: westCombatantBladeValue,
};
} else if (
(northCombatantFacing === dirNum.north && !bladeSides[northCombatantBlade]) ||
(northCombatantFacing === dirNum.south && bladeSides[northCombatantBlade])
) {
// North clone cleaving inside west (and therefore west clone cleaving north).
safeZone = output.southeast!();
adjacentZones = {
[dirNum.north]: westCombatantBladeValue,
[dirNum.east]: eastCombatantBladeValue,
[dirNum.south]: southCombatantBladeValue,
[dirNum.west]: northCombatantBladeValue,
};
} else if (
(southCombatantFacing === dirNum.south && bladeSides[southCombatantBlade]) ||
(southCombatantFacing === dirNum.north && !bladeSides[southCombatantBlade])
) {
// South clone cleaving inside west (and therefore west clone cleaving south).
safeZone = output.northeast!();
adjacentZones = {
[dirNum.north]: northCombatantBladeValue,
[dirNum.east]: eastCombatantBladeValue,
[dirNum.south]: westCombatantBladeValue,
[dirNum.west]: southCombatantBladeValue,
};
} else if (
(southCombatantFacing === dirNum.north && bladeSides[southCombatantBlade]) ||
(southCombatantFacing === dirNum.south && !bladeSides[southCombatantBlade])
) {
// South clone cleaving inside east (and therefore east clone cleaving south).
safeZone = output.northwest!();
adjacentZones = {
[dirNum.north]: northCombatantBladeValue,
[dirNum.east]: southCombatantBladeValue,
[dirNum.south]: eastCombatantBladeValue,
[dirNum.west]: westCombatantBladeValue,
};
} else {
// facing did not evaluate properly
console.error(
`Avowed Avatar: facing error, ` +
`${northCombatantFacing}, ${southCombatantFacing}, ` +
`${JSON.stringify(bladeSides[northCombatantBlade])}, ` +
`${JSON.stringify(bladeSides[southCombatantBlade])}`,
);
data.safeZone = output.unknown!();
return;
}
const currentBrand = data.currentBrand ? data.currentBrand : 0;
const currentTemperature = data.currentTemperature ? data.currentTemperature : 0;
const effectiveTemperature = currentTemperature + currentBrand;
// Calculate which adjacent zone to go to, if needed
let adjacentZone = null;
if (effectiveTemperature && adjacentZones) {
// Find the adjacent zone that gets closest to 0
const calculatedZones = Object.values(adjacentZones).map((i: number) => Math.abs(effectiveTemperature + i));
// Use zone closest to zero as output
const dirs = {
[dirNum.north]: output.north!(),
[dirNum.east]: output.east!(),
[dirNum.south]: output.south!(),
[dirNum.west]: output.west!(),
};
const zoneClosestToZero = [...calculatedZones].sort((a, b) => b - a).pop();
if (zoneClosestToZero === undefined)
throw new UnreachableCode();
adjacentZone = dirs[calculatedZones.indexOf(zoneClosestToZero)];
if (adjacentZone === undefined)
throw new UnreachableCode();
}
// Callout safe spot and get cleaved spot if both are known
// Callout safe spot only if no need to be cleaved
if (adjacentZone) {
data.safeZone = output.getCleaved!({ dir1: safeZone, dir2: adjacentZone });
} else if (safeZone) {
data.safeZone = output.safeSpot!({ dir: safeZone });
} else {
console.error(`Avowed Avatar: undefined zones`);
data.safeZone = output.unknown!();
}
},
alertText: (data, _matches, output) => !data.safeZone ? output.unknown!() : data.safeZone,
outputStrings: {
getCleaved: {
en: '${dir1} Safe Spot => ${dir2} for cleave',
de: 'Sichere Stelle ${dir1} => ${dir2} für Cleave',
fr: '${dir1} Zone sûre => ${dir2} pour le cleave',
ja: '${dir1}に安置 => ${dir2}範囲攻撃に',
cn: '去${dir1}方安全点 => 去${dir2}吃顺劈',
ko: '${dir1} 안전 지대 => ${dir2} 광역 맞기',
},
safeSpot: {
en: '${dir} Safe Spot',
de: 'Sichere Stelle ${dir}',
fr: '${dir} Zone sûre',
ja: '${dir}に安置',
cn: '去${dir}方安全点',
ko: '${dir} 안전 지대',
},
unknown: Outputs.unknown,
north: Outputs.north,
northeast: Outputs.northeast,
east: Outputs.east,
southeast: Outputs.southeast,
south: Outputs.south,
southwest: Outputs.southwest,
west: Outputs.west,
northwest: Outputs.northwest,
},
},
{
id: 'DelubrumSav Avowed Gleaming Arrow Collect',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Avowed Avatar', id: '594D' }),
netRegexDe: NetRegexes.startsUsing({ source: 'Spaltteil Der Eingeschworenen', id: '594D' }),
netRegexFr: NetRegexes.startsUsing({ source: 'Clone De La Trinité Féale', id: '594D' }),
netRegexJa: NetRegexes.startsUsing({ source: 'アヴァウドの分体', id: '594D' }),
netRegexCn: NetRegexes.startsUsing({ source: '誓约之分身', id: '594D' }),
netRegexKo: NetRegexes.startsUsing({ source: '맹세의 분열체', id: '594D' }),
run: (data, matches) => {
data.unseenIds ??= [];
data.unseenIds.push(parseInt(matches.sourceId, 16));
},
},
{
id: 'DelubrumSav Avowed Gleaming Arrow',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Avowed Avatar', id: '594D', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Spaltteil Der Eingeschworenen', id: '594D', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Clone De La Trinité Féale', id: '594D', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'アヴァウドの分体', id: '594D', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '誓约之分身', id: '594D', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '맹세의 분열체', id: '594D', capture: false }),
delaySeconds: 0.5,
suppressSeconds: 10,
promise: async (data) => {
const unseenIds = data.unseenIds;
if (!unseenIds)
return;
const unseenData = await callOverlayHandler({
call: 'getCombatants',
ids: unseenIds,
});
if (unseenData === null) {
console.error(`Gleaming Arrow: null data`);
return;
}
if (!unseenData.combatants) {
console.error(`Gleaming Arrow: null combatants`);
return;
}
if (unseenData.combatants.length !== unseenIds.length) {
console.error(`Gleaming Arrow: expected ${unseenIds.length}, got ${unseenData.combatants.length}`);
return;
}
data.unseenBadRows = [];
data.unseenBadCols = [];
for (const avatar of unseenData.combatants) {
const x = avatar.PosX - avowedCenterX;
const y = avatar.PosY - avowedCenterY;
// y=-107 = north side, x = -252, -262, -272, -282, -292
// x=-247 = left side, y = -62, -72, -82, -92, -102
// Thus, the possible deltas are -20, -10, 0, +10, +20.
// The other coordinate is +/-25 from center.
const maxDist = 22;
if (Math.abs(x) < maxDist) {
const col = Math.round((x + 20) / 10);
data.unseenBadCols.push(col);
}
if (Math.abs(y) < maxDist) {
const row = Math.round((y + 20) / 10);
data.unseenBadRows.push(row);
}
}
data.unseenBadRows.sort();
data.unseenBadCols.sort();
},
alertText: (data, _matches, output) => {
delete data.unseenIds;
const rows = data.unseenBadRows;
const cols = data.unseenBadCols;
if (!rows || !cols)
return;
if (data.avowedPhase === 'bow') {
// consider asserting that badCols are 0, 2, 4 here.
if (rows.includes(2))
return output.bowLight!();
return output.bowDark!();
}
if (data.avowedPhase !== 'staff')
return;
if (cols.includes(1)) {
if (rows.includes(1))
return output.staffOutsideCorner!();
return output.staffOutsideColInsideRow!();
}
if (cols.includes(0)) {
if (rows.includes(0))
return output.staffInsideCorner!();
return output.staffInsideColOutsideRow!();
}
},
outputStrings: {
bowDark: {
en: 'Dark (E/W of center)',
de: 'Dunkel (O/W von der Mitte)',
fr: 'Foncée (E/O du centre)',
ja: 'ダーク床 (東/西)',
cn: '内圈东西(左右)暗色地板',
ko: '어두운 타일 (맵 중앙의 왼/오른쪽)',
},
bowLight: {
en: 'Light (diagonal from center)',
de: 'Licht (Diagonal von der Mitte)',
fr: 'Claire (diagonale du centre)',
ja: 'ライト床 (中の対角)',
cn: '内圈四角亮色地板',
ko: '밝은 타일 (맵 중앙의 대각선)',
},
staffOutsideCorner: {
en: 'Outside Corner',
de: 'Äußere Ecken',
fr: 'Coin extérieur',
ja: '外のコーナー',
cn: '外圈四角亮色地板',
ko: '맵 구석의 밝은 타일',
},
staffInsideCorner: {
en: 'Inside Corner',
de: 'Innere Ecken',
fr: 'Coin intérieur',
ja: '中のコーナー',
cn: '内圈四角亮色地板',
ko: '구석에서 한칸 안쪽 밝은 타일',
},
staffOutsideColInsideRow: {
en: 'N/S of Corner',
de: 'N/S von der Ecke',
fr: 'N/S du coin',
ja: '南北行のダーク床',
cn: '外圈南北(上下)暗色地板',
ko: '맵 구석에서 북/남쪽의 어두운 타일',
},
staffInsideColOutsideRow: {
en: 'E/W of Corner',
de: 'O/W von der Ecke',
fr: 'E/O du coin',
ja: '東西列のダーク床',
cn: '外圈东西(左右)暗色地板',
ko: '맵 구석에서 왼/오른쪽의 어두운 타일',
},
},
},
{
id: 'DelubrumSav Lord Foe Splitter',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Stygimoloch Lord', id: '57D7' }),
netRegexDe: NetRegexes.startsUsing({ source: 'Anführer-Stygimoloch', id: '57D7' }),
netRegexFr: NetRegexes.startsUsing({ source: 'Seigneur Stygimoloch', id: '57D7' }),
netRegexJa: NetRegexes.startsUsing({ source: 'スティギモロク・ロード', id: '57D7' }),
netRegexCn: NetRegexes.startsUsing({ source: '冥河之王', id: '57D7' }),
netRegexKo: NetRegexes.startsUsing({ source: '스티키몰로크 군주', id: '57D7' }),
// THANKFULLY this starts using comes out immediately before the headmarker line.
preRun: (data) => data.firstUnknownHeadmarker = headmarker.foeSplitter,
response: (data, matches, output) => {
// cactbot-builtin-response
output.responseOutputStrings = {
cleaveOnYou: Outputs.tankCleaveOnYou,
cleaveNoTarget: Outputs.tankCleave,
avoidCleave: Outputs.avoidTankCleave,
cleaveOn: {
en: 'Tank Cleave on ${player}',
de: 'Tank Cleave auf ${player}',
fr: 'Tank Cleave sur ${player}',
ja: '${player}に範囲攻撃',
cn: '顺劈: ${player}',
ko: '${player}에게 광역 탱버',
},
};
if (matches.target === data.me)
return { alarmText: output.cleaveOnYou!() };
if (tankBusterOnParty(data, matches))
return { alertText: output.cleaveOn!({ player: data.ShortName(matches.target) }) };
return { infoText: output.avoidCleave!() };
},
},
{
id: 'DelubrumSav Lord Rapid Bolts',
type: 'HeadMarker',
netRegex: NetRegexes.headMarker(),
condition: (data, matches) => {
if (data.me !== matches.target)
return false;
return getHeadmarkerId(data, matches) === headmarker.thunder;
},
alarmText: (_data, _matches, output) => output.text!(),
outputStrings: {
text: {
en: 'Drop thunder outside',
de: 'Lege Blitz draußen ab',
fr: 'Déposez la foudre à l\'extérieur',
ja: '外に捨てる',
cn: '外圈放雷',
ko: '바깥에 번개장판 놓기',
},
},
},
{
id: 'DelubrumSav Lord Labyrinthine Fate Collect',
type: 'GainsEffect',
// 97E: Wanderer's Fate, Pushes outward on Fateful Word cast
// 97F: Sacrifice's Fate, Pulls to middle on Fateful Word cast
netRegex: NetRegexes.gainsEffect({ effectId: '97[EF]' }),
condition: Conditions.targetIsYou(),
preRun: (data, matches) => {
data.labyrinthineFate = matches.effectId.toUpperCase();
},
// This effect is given repeatedly.
suppressSeconds: 30,
infoText: (data, _matches, output) => {
// The first time this happens, there is ~2.5 seconds between debuff application
// and the start of the cast to execute that debuff. Be less noisy on the first.
if (!data.seenLabyrinthineFate)
return;
if (data.labyrinthineFate === '97F')
return output.getOutLater!();
if (data.labyrinthineFate === '97E')
return output.getInLater!();
},
run: (data) => data.seenLabyrinthineFate = true,
outputStrings: {
getOutLater: {
en: '(sacrifice out, for later)',
de: '(Heranziehen raus, für später)',
fr: '(sacrifice à l\'extérieur, pour plus tard)',
ja: '(外の切れ目に引き付ける)',
cn: '(外缺口等待吸引)',
ko: '(저주의 말 시전하면 바깥쪽에 서기)',
},
getInLater: {
en: '(wanderer in, for later)',
de: '(Zurückschleudern rein, für später)',
fr: '(errant à l\'intérieur, pour plus tard)',
ja: '(中の切れ目に吹き飛ばす)',
cn: '(内缺口等待击退)',
ko: '(저주의 말 시전하면 안쪽에 서기)',
},
},
},
{
id: 'DelubrumSav Lord Fateful Words',
type: 'StartsUsing',
netRegex: NetRegexes.startsUsing({ source: 'Stygimoloch Lord', id: '57C9', capture: false }),
netRegexDe: NetRegexes.startsUsing({ source: 'Anführer-Stygimoloch', id: '57C9', capture: false }),
netRegexFr: NetRegexes.startsUsing({ source: 'Seigneur Stygimoloch', id: '57C9', capture: false }),
netRegexJa: NetRegexes.startsUsing({ source: 'スティギモロク・ロード', id: '57C9', capture: false }),
netRegexCn: NetRegexes.startsUsing({ source: '冥河之王', id: '57C9', capture: false }),
netRegexKo: NetRegexes.startsUsing({ source: '스티키몰로크 군주', id: '57C9', capture: false }),
// 97E: Wanderer's Fate, Pushes outward on Fateful Word cast
// 97F: Sacrifice's Fate, Pulls to middle on Fateful Word cast
// Labyrinthine Fate is cast and 1 second later debuffs are applied
// First set of debuffs go out 7.7 seconds before Fateful Word is cast
// Remaining set of debuffs go out 24.3 seconds before Fateful Word is cast
alertText: (data, _matches, output) => {
if (data.labyrinthineFate === '97F')
return output.getOut!();
if (data.labyrinthineFate === '97E')
return output.getIn!();
},
// In case you die and don't get next debuff, clean this up so it doesn't call again.
run: (data) => delete data.labyrinthineFate,
outputStrings: {
getOut: Outputs.out,
getIn: Outputs.in,
},
},
    {
      id: 'DelubrumSav Lord Devastating Bolt',
      type: 'StartsUsing',
      // Players must stand in one of the nooks in the arena edge while this
      // resolves (see the "Get In Nook" output below).
      netRegex: NetRegexes.startsUsing({ source: 'Stygimoloch Lord', id: '57C5', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Anführer-Stygimoloch', id: '57C5', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Seigneur Stygimoloch', id: '57C5', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'スティギモロク・ロード', id: '57C5', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '冥河之王', id: '57C5', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '스티키몰로크 군주', id: '57C5', capture: false }),
      durationSeconds: 4,
      // Multiple ids in a short window would otherwise double-call.
      suppressSeconds: 1,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Get In Nook',
          de: 'Geh in die Ecke',
          fr: 'Allez dans un recoin',
          ja: '切れ目に入る',
          cn: '进入缺口',
          ko: '틈새에 들어가기',
        },
      },
    },
    {
      id: 'DelubrumSav Lord 1111-Tonze Swing',
      type: 'StartsUsing',
      // Responses.getOut() calls players away from the boss for this cast.
      netRegex: NetRegexes.startsUsing({ source: 'Stygimoloch Lord', id: '57D8', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Anführer-Stygimoloch', id: '57D8', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Seigneur Stygimoloch', id: '57D8', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'スティギモロク・ロード', id: '57D8', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '冥河之王', id: '57D8', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '스티키몰로크 군주', id: '57D8', capture: false }),
      response: Responses.getOut(),
    },
{
id: 'DelubrumSav Queen Cleansing Slash',
type: 'StartsUsing',
// PLD and GNB tank invulnerabilities do not get Physical Vulnerability Up
// Tank swap will be required between the two hits if not using a tank invulnerability
// Tank swap required after second hit if not using PLD or GNB tank invulnerabilities
// To avoid bad swaps between 11 other tanks, only mention swap to targetted tank
netRegex: NetRegexes.startsUsing({ source: 'The Queen', id: '59F5' }),
netRegexDe: NetRegexes.startsUsing({ source: 'Kriegsgöttin', id: '59F5' }),
netRegexFr: NetRegexes.startsUsing({ source: 'Garde-La-Reine', id: '59F5' }),
netRegexJa: NetRegexes.startsUsing({ source: 'セイブ・ザ・クイーン', id: '59F5' }),
netRegexCn: NetRegexes.startsUsing({ source: '天佑女王', id: '59F5' }),
netRegexKo: NetRegexes.startsUsing({ source: '세이브 더 퀸', id: '59F5' }),
response: (data, matches, output) => {
// cactbot-builtin-response
output.responseOutputStrings = {
tankBusterAndSwap: {
en: 'Tank Buster + Swap',
de: 'Tankbuster + Wechsel',
fr: 'Tank buster + Swap',
ja: 'タンクバスター + スイッチ',
cn: '死刑 + 换T',
ko: '탱버 + 교대',
},
tankBusterOnYou: Outputs.tankBusterOnYou,
tankBusterOnPlayer: Outputs.tankBusterOnPlayer,
tankInvuln: {
en: 'Invuln Tank Buster',
de: 'Unverwundbarkeit für Tank Buster benutzen',
fr: 'Invincible sur le Tank buster',
ja: 'タンクバスター (被ダメージ上昇付き)',
cn: '易伤死刑',
ko: '무적기로 탱버 처리',
},
};
if (data.me === matches.target) {
if (data.role === 'tank') {
if (data.job === 'PLD' || data.job === 'GNB')
return { alertText: output.tankInvuln!() };
return { alertText: output.tankBusterAndSwap!() };
}
return { alarmText: output.tankBusterOnYou!() };
}
const sev = data.role === 'healer' || data.role === 'tank' ? 'alertText' : 'infoText';
return { [sev]: output.tankBusterOnPlayer!({ player: matches.target }) };
},
},
    {
      id: 'DelubrumSav Queen Cleansing Slash Doom',
      type: 'GainsEffect',
      // Each Cleansing Slash applies a cleansable Doom (38E), if damage is taken
      netRegex: NetRegexes.gainsEffect({ source: 'The Queen', effectId: '38E' }),
      netRegexDe: NetRegexes.gainsEffect({ source: 'Kriegsgöttin', effectId: '38E' }),
      netRegexFr: NetRegexes.gainsEffect({ source: 'Garde-La-Reine', effectId: '38E' }),
      netRegexJa: NetRegexes.gainsEffect({ source: 'セイブ・ザ・クイーン', effectId: '38E' }),
      netRegexCn: NetRegexes.gainsEffect({ source: '天佑女王', effectId: '38E' }),
      netRegexKo: NetRegexes.gainsEffect({ source: '세이브 더 퀸', effectId: '38E' }),
      // Only prompt players who can actually cleanse (per data.CanCleanse()).
      condition: (data) => data.CanCleanse(),
      infoText: (data, matches, output) => output.text!({ player: data.ShortName(matches.target) }),
      outputStrings: {
        text: {
          en: 'Esuna ${player}',
          de: 'Medica ${player}',
          fr: 'Guérison sur ${player}',
          ja: '${player} にエスナ',
          cn: '驱散: ${player}',
          ko: '"${player}" 에스나',
        },
      },
    },
{
id: 'DelubrumSav Queen Dispel',
type: 'GainsEffect',
// Players with Dispel should Dispel all the buffs on The Queen.
// Critical Strikes = 705 is the first one.
netRegex: NetRegexes.gainsEffect({ target: 'The Queen', effectId: '705', capture: false }),
netRegexDe: NetRegexes.gainsEffect({ target: 'Kriegsgöttin', effectId: '705', capture: false }),
netRegexFr: NetRegexes.gainsEffect({ target: 'Garde-La-Reine', effectId: '705', capture: false }),
netRegexJa: NetRegexes.gainsEffect({ target: 'セイブ・ザ・クイーン', effectId: '705', capture: false }),
netRegexCn: NetRegexes.gainsEffect({ target: '天佑女王', effectId: '705', capture: false }),
netRegexKo: NetRegexes.gainsEffect({ target: '세이브 더 퀸', effectId: '705', capture: false }),
condition: (data) => {
data.queenDispelCount = (data.queenDispelCount || 0) + 1;
// The third time she gains this effect is the enrage, and there's no need to dispel.
return data.queenDispelCount <= 2;
},
infoText: (_data, _matches, output) => output.text!(),
outputStrings: {
text: {
en: 'Dispel Queen',
de: 'Kriegsgöttin reinigen',
fr: 'Dissipez la Reine',
ja: 'ボスにディスペル',
cn: '驱魔 > 女王',
ko: '보스 디스펠',
},
},
},
    {
      id: 'DelubrumSav Queen Ball Lightning',
      type: 'AddedCombatant',
      // Players with Reflect should destroy one for party to stand in the shield left behind
      // npcNameId 7974 appears to be the Ball Lightning add (matches the trigger name).
      netRegex: NetRegexes.addedCombatantFull({ npcNameId: '7974', capture: false }),
      // Several orbs spawn together; one callout is enough.
      suppressSeconds: 1,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Reflect Orbs',
          de: 'Reflektiere Orbs',
          fr: 'Reflétez les orbes',
          ja: '雷玉にリフレク',
          cn: '反射雷球',
          ko: '리플렉트로 구슬 처리',
        },
      },
    },
    {
      id: 'DelubrumSav Queen Ball Lightning Bubble',
      type: 'WasDefeated',
      // Once an orb dies, call the party into the bubble it leaves behind
      // (see the Ball Lightning trigger above).
      netRegex: NetRegexes.wasDefeated({ target: 'Ball Lightning', capture: false }),
      netRegexDe: NetRegexes.wasDefeated({ target: 'Elektrosphäre', capture: false }),
      netRegexFr: NetRegexes.wasDefeated({ target: 'Orbe De Foudre', capture: false }),
      netRegexJa: NetRegexes.wasDefeated({ target: '雷球', capture: false }),
      netRegexCn: NetRegexes.wasDefeated({ target: '雷球', capture: false }),
      netRegexKo: NetRegexes.wasDefeated({ target: '뇌구', capture: false }),
      // Multiple orbs may die; only call once per set.
      suppressSeconds: 20,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Get in Bubble',
          de: 'Geh in die Blase',
          fr: 'Allez dans la bulle',
          ja: '泡に入る',
          cn: '进泡泡',
          ko: '보호막 안에 들어가기',
        },
      },
    },
    {
      id: 'DelubrumSav Queen Fiery Portent',
      type: 'StartsUsing',
      // 5A21 applies Pyretic (see the Icy Portent comment below):
      // stop everything as the cast resolves.
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Soldier', id: '5A21' }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Soldat Der Königin', id: '5A21' }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Soldat De La Reine', id: '5A21' }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ソルジャー', id: '5A21' }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王士兵', id: '5A21' }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 병사', id: '5A21' }),
      // Call 5 seconds before the cast finishes, regardless of its length.
      delaySeconds: (_data, matches) => parseFloat(matches.castTime) - 5,
      durationSeconds: 5.5,
      response: Responses.stopEverything(),
    },
    {
      id: 'DelubrumSav Queen Icy Portent',
      type: 'StartsUsing',
      // Assuming you need to move for 3 seconds (duration of Pyretic from Fiery Portent)
      // Opposite of Fiery Portent: keep moving while this resolves.
      netRegex: NetRegexes.startsUsing({ source: 'Queen\'s Soldier', id: '5A22' }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Soldat Der Königin', id: '5A22' }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Soldat De La Reine', id: '5A22' }),
      netRegexJa: NetRegexes.startsUsing({ source: 'クイーンズ・ソルジャー', id: '5A22' }),
      netRegexCn: NetRegexes.startsUsing({ source: '女王士兵', id: '5A22' }),
      netRegexKo: NetRegexes.startsUsing({ source: '여왕의 병사', id: '5A22' }),
      // Call 5 seconds before the cast finishes, regardless of its length.
      delaySeconds: (_data, matches) => parseFloat(matches.castTime) - 5,
      durationSeconds: 5.5,
      response: Responses.moveAround('alert'),
    },
    {
      id: 'DelubrumSav Queen Judgment Blade Right',
      type: 'StartsUsing',
      // 59F2: find the Queen's charge path and dodge to its right side.
      netRegex: NetRegexes.startsUsing({ source: 'The Queen', id: '59F2', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Kriegsgöttin', id: '59F2', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Garde-La-Reine', id: '59F2', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'セイブ・ザ・クイーン', id: '59F2', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '天佑女王', id: '59F2', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '세이브 더 퀸', id: '59F2', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Find Charge, Dodge Right',
          de: 'Halte nach dem Ansturm ausschau, weiche nach rechts aus',
          fr: 'Repérez la charge, esquivez à droite',
          ja: '右へ、突進を避ける',
          cn: '去右侧躲避冲锋',
          ko: '돌진 찾고, 오른쪽 피하기',
        },
      },
    },
    {
      id: 'DelubrumSav Queen Judgment Blade Left',
      type: 'StartsUsing',
      // 59F1: mirror of Judgment Blade Right — dodge to the left side.
      netRegex: NetRegexes.startsUsing({ source: 'The Queen', id: '59F1', capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: 'Kriegsgöttin', id: '59F1', capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: 'Garde-La-Reine', id: '59F1', capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: 'セイブ・ザ・クイーン', id: '59F1', capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: '天佑女王', id: '59F1', capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: '세이브 더 퀸', id: '59F1', capture: false }),
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Find Charge, Dodge Left',
          de: 'Halte nach dem Ansturm ausschau, weiche nach links aus',
          fr: 'Repérez la charge, esquivez à gauche',
          ja: '左へ、突進を避ける',
          cn: '去左侧躲避冲锋',
          ko: '돌진 찾고, 왼쪽 피하기',
        },
      },
    },
    {
      id: 'DelubrumSav Queen Guard AoEs',
      type: 'StartsUsing',
      // One generic trigger covering the repeated AoEs of all four guards.
      // 5A16 from Queen's Warrior
      // 5A08 from Queen's Knight
      // 5A35 from Queen's Gunner
      // 5A23 from Queen's Soldier
      // These happen in sets:
      // Set 1 Double AoE, 3 seconds later Double AoE
      // Set 2 5 seconds later, Double AoE, 3 seconds later Double AoE, 3 seconds later AoE + Bleed
      // Set 3 1.3 seconds later, Single AoEs every 3 seconds all while bleed from set 2 persists
      netRegex: NetRegexes.startsUsing({ source: ['Queen\'s Warrior', 'Queen\'s Knight', 'Queen\'s Gunner', 'Queen\'s Soldier'], id: ['5A16', '5A08', '5A35', '5A23'], capture: false }),
      netRegexDe: NetRegexes.startsUsing({ source: ['Kriegerin Der Königin', 'Ritter Der Königin', 'Schütze Der Königin', 'Soldat Der Königin'], id: ['5A16', '5A08', '5A35', '5A23'], capture: false }),
      netRegexFr: NetRegexes.startsUsing({ source: ['Guerrière De La Reine', 'Chevalier De La Reine', 'Fusilier De La Reine', 'Soldat De La Reine'], id: ['5A16', '5A08', '5A35', '5A23'], capture: false }),
      netRegexJa: NetRegexes.startsUsing({ source: ['クイーンズ・ウォリアー', 'クイーンズ・ナイト', 'クイーンズ・ガンナー', 'クイーンズ・ソルジャー'], id: ['5A16', '5A08', '5A35', '5A23'], capture: false }),
      netRegexCn: NetRegexes.startsUsing({ source: ['女王战士', '女王骑士', '女王枪手', '女王士兵'], id: ['5A16', '5A08', '5A35', '5A23'], capture: false }),
      netRegexKo: NetRegexes.startsUsing({ source: ['여왕의 전사', '여왕의 기사', '여왕의 총사', '여왕의 병사'], id: ['5A16', '5A08', '5A35', '5A23'], capture: false }),
      // Only call out the beginning of a set of two casts
      suppressSeconds: 5,
      alertText: (_data, _matches, output) => output.text!(),
      outputStrings: {
        text: {
          en: 'Multiple AOEs',
          de: 'Mehrere AoEs',
          fr: 'Multiple AoEs',
          ja: '連続AoE',
          cn: '连续AoE',
          ko: '다중 광역공격',
        },
      },
    },
],
timelineReplace: [
{
'locale': 'en',
'replaceText': {
'Right-Sided Shockwave/Left-Sided Shockwave': 'Right/Left Shockwave',
'Left-Sided Shockwave/Right-Sided Shockwave': 'Left/Right Shockwave',
'Sword Omen/Shield Omen': 'Sword/Shield Omen',
'Shield Omen/Sword Omen': 'Shield/Sword Omen',
'Flashvane/Fury Of Bozja/Infernal Slash': 'Random Arsenal',
'Icy Portent/Fiery Portent': 'Icy/Fiery Portent',
'Fiery Portent/Icy Portent': 'Fiery/Icy Portent',
},
},
{
'locale': 'de',
'replaceSync': {
'(?<!Crowned )Marchosias': 'Marchosias',
'Aetherial Bolt': 'Magiegeschoss',
'Aetherial Burst': 'Magiebombe',
'Aetherial Orb': 'Magiekugel',
'Aetherial Sphere': 'Ätherwind',
'Aetherial Ward': 'Barriere',
'Automatic Turret': 'Selbstschuss-Gyrocopter',
'Avowed Avatar': 'Spaltteil der Eingeschworenen',
'Ball Lightning': 'Elektrosphäre',
'Ball Of Fire': 'Feuerball',
'Bicolor Golem': 'zweifarbig(?:e|er|es|en) Golem',
'Bozjan Phantom': 'Bozja-Phantom',
'Bozjan Soldier': 'Bozja-Soldat',
'Crowned Marchosias': 'Marchosias-Leittier',
'Dahu': 'Dahu',
'Dahu was defeated by': 'hat Dahu besiegt',
'Grim Reaper': 'Grausamer Schlitzer',
'Gun Turret': 'Geschützturm',
'Immolating Flame': 'Flammensturm',
'Pride of the Lion(?!ess)': 'Saal des Löwen',
'Pride of the Lioness': 'Segen der Löwin',
'Queen\'s Gunner': 'Schütze der Königin',
'Queen\'s Knight': 'Ritter der Königin',
'Queen\'s Soldier': 'Soldat der Königin',
'Queen\'s Warrior': 'Kriegerin der Königin',
'Queensheart': 'Saal der Dienerinnen',
'Ruins Golem': 'Ruinengolem',
'Sanguine Clot': 'schauerlich(?:e|er|es|en) Blutgerinsel',
'Seeker Avatar': 'Spaltteil der Sucher',
'Soldier Avatar': 'Spaltteil des Soldaten',
'Spark Arrow': 'Feuerpfeil',
'Spiritual Sphere': 'Seelenwind',
'Stuffy Wraith': 'muffig(?:e|er|es|en) Schrecken',
'Stygimoloch Lord': 'Anführer-Stygimoloch',
'Stygimoloch Monk': 'Stygimoloch',
'Stygimoloch Warrior': 'Krieger-Stygimoloch',
'Tempestuous Orb': 'groß(?:e|er|es|en) Eisball',
'The Hall of Hieromancy': 'Halle des Orakels',
'The Hall of Supplication': 'Große Gebetshalle',
'The Path of Divine Clarity': 'Sanktuarium des Lebens',
'The Queen': 'Kriegsgöttin',
'The Theater of One': 'Einsame Arena',
'The Vault of Singing Crystal': 'Ort des Klingenden Kristalls',
'Trinity Avowed': 'Trinität der Eingeschworenen',
'Trinity Seeker': 'Trinität der Sucher',
'Viscous Clot': 'zäh(?:e|er|es|en) Blutgerinsel',
'Why\\.\\.\\.won\'t\\.\\.\\.you\\.\\.\\.': 'Neiiin! Wie ist das möglich',
},
'replaceText': {
'(?<!C)Rush': 'Stürmen',
'(?<!Inescapable )Entrapment': 'Fallenlegen',
'--Spite Check--': '--Meditation Check--',
'--adds--': '--Adds--',
'--bleed--': '--Blutung--',
'--chains--': '--Ketten--',
'--stunned--': '--betäubt--',
'--tethers--': '--Verbindungen--',
'--unstunned--': '--nicht länger betäubt--',
'1111-Tonze Swing': '1111-Tonzen-Schwung',
'Above Board': 'Über dem Feld',
'Act Of Mercy': 'Schneller Stich des Dolches',
'Allegiant Arsenal': 'Waffenwechsel',
'Aura Sphere': 'Kampfwind',
'Automatic Turret': 'Selbstschuss-Gyrocopter',
'Baleful Blade': 'Stoß der Edelklinge',
'Baleful Comet': 'Flammenstapel der Edelklinge',
'Baleful Firestorm': 'Ätherflamme der Edelklinge',
'Baleful Onslaught': 'Wilder Schlitzer der Edelklinge',
'Baleful Swathe': 'Schwarzer Wirbel der Edelklinge',
'Beck And Call To Arms': 'Feuerbefehl',
'Blade Of Entropy': 'Eisflammenklinge',
'Blood And Bone': 'Wellenschlag',
'Bloody Wraith': 'blutrünstiger Schrecken',
'Bombslinger': 'Bombenabwurf',
'Boost': 'Kräfte sammeln',
'Bozjan Soldier': 'Bozja-Soldat',
'Burn': 'Verbrennung',
'Cleansing Slash': 'Säubernder Schnitt',
'Coat Of Arms': 'Trotz',
'Coerce': 'Zwang',
'Core Combustion': 'Brennender Kern',
'Crazed Rampage': 'Gereizter Wutlauf',
'Creeping Miasma': 'Miasmahauch',
'Crushing Hoof': 'Tödlicher Druck',
'Dead Iron': 'Woge der Feuerfaust',
'Death Scythe': 'Todessichel',
'Devastating Bolt': 'Heftiger Donner',
'Devour': 'Verschlingen',
'Double Gambit': 'Illusionsmagie',
'Elemental Arrow': 'Element-Pfeil',
'Elemental Blast': 'Element-Explosion',
'Elemental Brand': 'Eisflammenfluch',
'Elemental Impact': 'Einschlag',
'Empyrean Iniquity': 'Empyreische Interdiktion',
'Excruciation': 'Fürchterlicher Schmerz',
'Falling Rock': 'Steinschlag',
'Fateful Words': 'Worte des Verderbens',
'Feral Howl': 'Wildes Heulen',
'Fiery Portent': 'Fieberhitze',
'Firebreathe': 'Lava-Atem',
'First Mercy': '1. Streich: Viererdolch-Haltung',
'Flailing Strike': 'Wirbelnder Schlag',
'Flames Of Bozja': 'Bozianische Flamme',
'Flashvane': 'Schockpfeile',
'Focused Tremor': 'Kontrolliertes Beben',
'Foe Splitter': 'Tobender Teiler',
'Fool\'s Gambit': 'Bezauberungsmagie',
'Forceful Strike': 'Kraftvoller Schlag',
'Fourth Mercy': '4. Streich: Viererdolch-Haltung',
'Fracture': 'Sprengung',
'Freedom Of Bozja': 'Bozianische Freiheit',
'Fury Of Bozja': 'Bozianische Wut',
'Gleaming Arrow': 'Funkelnder Pfeil',
'Glory Of Bozja': 'Stolz von Bozja',
'Gods Save The Queen': 'Wächtergott der Königin',
'Great Ball Of Fire': 'Feuerball',
'Gun Turret': 'Geschützturm',
'Gunnhildr\'s Blades': 'Gunnhildrs Schwert',
'Head Down': 'Scharrende Hufe',
'Heaven\'s Wrath': 'Heilige Perforation',
'Higher Power': 'Elektrische Ladung',
'Hot And Cold': 'Heiß und kalt',
'Hot Charge': 'Heiße Rage',
'Hunter\'s Claw': 'Jägerklaue',
'Hysteric Assault': 'Hysterischer Ansturm',
'Ice Spikes': 'Eisstachel',
'Icy Portent': 'Frostwinde',
'Inescapable Entrapment': 'Extrem-Fallenlegen',
'Infernal Slash': 'Yama-Schnitt',
'Invert Miasma': 'Umgekehrte Miasmakontrolle',
'Iron Impact': 'Kanon der Feuerfaust',
'Iron Rose': 'Rose des Hasses der Feuerfaust',
'Iron Splitter': 'Furor der Feuerfaust',
'Judgment Blade': 'Klinge des Urteils',
'Labyrinthine Fate': 'Fluch des Herren des Labyrinths',
'Leaping Spark': 'Endloser Donner',
'Left-Sided Shockwave': 'Linke Schockwelle',
'Lethal Blow': 'Verheerender Schlag',
'Lingering Miasma': 'Miasmawolke',
'Lots Cast': 'Magieexplosion',
'Maelstrom\'s Bolt': 'Heiligenlichter',
'Malediction of Agony': 'Pochender Fluch',
'Malediction of Ruin': 'Fluch des Verfalls',
'Mana Flame': 'Manaflamme',
'Manifest Avatar': 'Teilung des Selbsts',
'Manipulate Miasma': 'Miasmakontrolle',
'Memory of the Labyrinth': 'Edikt des Herren des Labyrinths',
'Merciful Arc': 'Fächertanz des Dolches',
'Merciful Blooms': 'Kasha des Dolches',
'Merciful Breeze': 'Yukikaze des Dolches',
'Merciful Moon': 'Gekko des Dolches',
'Mercy Fourfold': 'Viererdolch',
'Metamorphose': 'Materiewandel',
'Misty Wraith': 'flüchtiger Schrecken',
'Northswain\'s Glow': 'Stella Polaris',
'Optimal Offensive': 'Beste Attacke',
'Optimal Play': 'Bestes Manöver',
'Pawn Off': 'Kranzklinge',
'Phantom Edge': 'Phantomklingen',
'Queen\'s Edict': 'Hohes Edikt der Königin',
'Queen\'s Justice': 'Hoheitliche Strafe',
'Queen\'s Shot': 'Omnidirektionalschuss',
'Queen\'s Will': 'Edikt der Königin',
'Quick March': 'Marschbefehl',
'Rapid Bolts': 'Kettenblitz',
'Rapid Sever': 'Radikale Abtrennung',
'Reading': 'Demontage',
'Relentless Battery': 'Koordiniertes Manöver',
'Relentless Play': 'Koordinierter Angriff',
'Rending Bolt': 'Fallender Donner',
'Reverberating Roar': 'Einsturzgefahr',
'Reversal Of Forces': 'Materieinversion',
'Right-Sided Shockwave': 'Rechte Schockwelle',
'Ruins Golem': 'Ruinengolem',
'Sanguine Clot': 'schauerliches Blutgerinsel',
'Seasons Of Mercy': 'Setsugekka des Dolches',
'Second Mercy': '2. Streich: Viererdolch-Haltung',
'Secrets Revealed': 'Enthüllte Geheimnisse',
'Shield Omen': 'Schildhaltung',
'Shimmering Shot': 'Glitzerpfeil',
'Shot In The Dark': 'Einhändiger Schuss',
'Sniper Shot': 'Fangschuss',
'Spiritual Sphere': 'Seelenwind',
'Spit Flame': 'Flammenspucke',
'Spiteful Spirit': 'Meditation',
'Strongpoint Defense': 'Absolutschild',
'Summon Adds': 'Add-Beschwörung',
'Summon(?! Adds)': 'Beschwörung',
'Sun\'s Ire': 'Flammenschlag',
'Surge of Vigor': 'Eifer',
'Surging Flames': 'Feuerangriff',
'Surging Flood': 'Wasserangriff',
'Swirling Miasma': 'Miasmawirbel',
'Sword Omen': 'Schwerthaltung',
'The Ends': 'Kreuzschnitt',
'The Means': 'Kreuzschlag',
'Third Mercy': '3. Streich: Viererdolch-Haltung',
'Thunderous Discharge': 'Blitznetz',
'Turret\'s Tour': 'Querschlägerhagel',
'Undying Hatred': 'Über-Psychokinese',
'Unlucky Lot': 'Magiebombe',
'Unrelenting Charge': 'Ungestümer Ansturm',
'Unseen Eye': 'Geist des Blütensturms',
'Unwavering Apparition': 'Geist des Schlächters',
'Verdant Path': 'Lehren des Grünen Pfades',
'Verdant Tempest': 'Zauberwind des Grünen Pfades',
'Vicious Swipe': 'Frenetischer Feger',
'Vile Wave': 'Welle der Boshaftigkeit',
'Viscous Clot': 'zähes Blutgerinsel',
'Weave Miasma': 'Miasmathese',
'Weight Of Fortune': 'Erdrückende Kraft',
'Whack': 'Wildes Schlagen',
'Winds Of Fate': 'Sturm der Gewalt',
'Winds Of Weight': 'Erdrückender Sturm',
'Withering Curse': 'Wichtelfluch',
'Wrath Of Bozja': 'Bozianischer Zorn',
},
},
{
'locale': 'fr',
'replaceSync': {
'(?<!Crowned )Marchosias': 'marchosias',
'Aetherial Bolt': 'petite bombe',
'Aetherial Burst': 'énorme bombe',
'Aetherial Orb': 'amas d\'éther élémentaire',
'Aetherial Sphere': 'sphère d\'éther',
'Aetherial Ward': 'Barrière magique',
'Automatic Turret': 'Auto-tourelle',
'Avowed Avatar': 'clone de la trinité féale',
'Ball Lightning': 'Orbe de Foudre',
'Ball Of Fire': 'Boule de flammes',
'Bicolor Golem': 'golem bicolore',
'Bozjan Phantom': 'fantôme bozjien',
'Bozjan Soldier': 'soldat bozjien',
'Crowned Marchosias': 'marchosias alpha',
'Dahu': 'dahu',
'Grim Reaper': 'Couperet funeste',
'Gun Turret': 'Tourelle dirigée',
'Immolating Flame': 'grande boule de feu tourbillonnante',
'Pride of the Lion(?!ess)': 'Hall du Lion',
'Pride of the Lioness': 'Bénédiction de la Lionne',
'Queen\'s Gunner': 'fusilier de la reine',
'Queen\'s Knight': 'chevalier de la reine',
'Queen\'s Soldier': 'soldat de la reine',
'Queen\'s Warrior': 'guerrière de la reine',
'Queensheart': 'Chambre des prêtresses',
'Ruins Golem': 'golem des ruines',
'Sanguine Clot': 'caillot terrifiant',
'Seeker Avatar': 'clone de la trinité soudée',
'Soldier Avatar': 'double de soldat',
'Spark Arrow': 'volée de flèches de feu',
'Spiritual Sphere': 'sphère immatérielle',
'Stuffy Wraith': 'spectre boursouflé',
'Stygimoloch Lord': 'seigneur stygimoloch',
'Stygimoloch Monk': 'stygimoloch',
'Stygimoloch Warrior': 'guerrier stygimoloch',
'Tempestuous Orb': 'grande boule de glace',
'The Hall of Hieromancy': 'Salle des oracles',
'The Hall of Supplication': 'Grande salle des prières',
'The Path of Divine Clarity': 'Salle des sages',
'The Queen': 'Garde-la-Reine',
'The Theater of One': 'Amphithéâtre en ruines',
'The Vault of Singing Crystal': 'Chambre des cristaux chantants',
'Trinity Avowed': 'trinité féale',
'Trinity Seeker': 'trinité soudée',
'Viscous Clot': 'caillot visqueux',
'Why\\.\\.\\.won\'t\\.\\.\\.you\\.\\.\\.': 'Grrroooargh.... Cette humaine... est forte...',
},
'replaceText': {
'\\?': ' ?',
'--Spite Check--': '--Vague de brutalité--',
'--adds--': '--adds--',
'--bleed--': '--saignement--',
'--chains--': '--chaînes--',
'--stunned--': '--étourdi(e)--',
'--tethers--': '--liens--',
'--unstunned--': '--non étourdi(e)--',
'(?<!C)Rush': 'Ruée',
'(?<!Inescapable )Entrapment': 'Pose de pièges',
'1111-Tonze Swing': 'Swing de 1111 tonz',
'Above Board': 'Aire de flottement',
'Act Of Mercy': 'Fendreciel rédempteur',
'Allegiant Arsenal': 'Changement d\'arme',
'Aura Sphere': 'sphère de brutalité',
'Automatic Turret': 'Auto-tourelle',
'Baleful Blade': 'Assaut singulier',
'Baleful Comet': 'Choc des flammes singulier',
'Baleful Firestorm': 'Ruée de flammes singulière',
'Baleful Onslaught': 'Fendoir singulier',
'Baleful Swathe': 'Flux de noirceur singulier',
'Beck And Call To Arms': 'Ordre d\'attaquer',
'Blade Of Entropy': 'Sabre du feu et de la glace',
'Blood And Bone': 'Onde tranchante',
'Bloody Wraith': 'spectre sanglant',
'Bombslinger': 'Jet de bombe',
'Boost': 'Renforcement',
'Bozjan Soldier': 'soldat bozjien',
'Burn': 'Combustion',
'Cleansing Slash': 'Taillade purifiante',
'Coat Of Arms': 'Bouclier directionnel',
'Coerce': 'Ordre irrefusable',
'Core Combustion': 'Noyau brûlant',
'Crazed Rampage': 'Tranchage final',
'Creeping Miasma': 'Coulée miasmatique',
'Crushing Hoof': 'Saut pesant',
'Dead Iron': 'Vague des poings de feu',
'Death Scythe': 'Faux de la mort',
'Devastating Bolt': 'Cercle de foudre',
'Devour': 'Dévoration',
'Double Gambit': 'Manipulation des ombres',
'Elemental Arrow': 'Flèche élémentaire',
'Elemental Blast': 'Explosion élémentaire',
'Elemental Brand': 'Malédiction du feu et de la glace',
'Elemental Impact': 'Impact',
'Empyrean Iniquity': 'Injustice empyréenne',
'Excruciation': 'Atroce douleur',
'Falling Rock': 'Chute de pierre',
'Fateful Words': 'Mots de calamité',
'Feral Howl': 'Rugissement sauvage',
'Fiery Portent/Icy Portent': 'Rideau de flammes/givre',
'Firebreathe': 'Souffle de lave',
'First Mercy': 'Première lame rédemptrice',
'Flailing Strike': 'Hachage rotatif',
'Flames Of Bozja': 'Flammes de Bozja',
'Flashvane(?!/)': 'Flèches fulgurantes',
'Flashvane/Fury Of Bozja/Infernal Slash': 'Arsenal aléatoire',
'Focused Tremor': 'Séisme localisé',
'Foe Splitter': 'Fendoir horizontal',
'Fool\'s Gambit': 'Manipulation des sens',
'Forceful Strike': 'Hachage surpuissant',
'Fourth Mercy': 'Quatrième lame rédemptrice',
'Fracture': 'Fracture',
'Freedom Of Bozja': 'Liberté de Bozja',
'(?<!/)Fury Of Bozja(?!/)': 'Furie de Bozja',
'Gleaming Arrow': 'Flèche miroitante',
'Glory Of Bozja': 'Gloire de Bozja',
'Gods Save The Queen': 'Que les Dieux gardent la Reine',
'Great Ball Of Fire': 'Boule de feu tourbillonante',
'Gun Turret': 'Tourelle dirigée',
'Gunnhildr\'s Blades': 'Lame de Gunnhildr',
'Head Down': 'Charge bestiale',
'Heaven\'s Wrath': 'Ire céleste',
'Higher Power': 'Charge électrique',
'Hot And Cold': 'Chaud et froid',
'Hot Charge': 'Charge brûlante',
'Hunter\'s Claw': 'Griffes prédatrices',
'Hysteric Assault': 'Assaut forcené',
'Ice Spikes': 'Pointes de glace',
'Icy Portent/Fiery Portent': 'Rideau de givre/flammes',
'Inescapable Entrapment': 'Parterre de pièges',
'(?<!/)Infernal Slash': 'Taillade de Yama',
'Invert Miasma': 'Contrôle des miasmes inversé',
'Iron Impact': 'Canon d\'ardeur des poings de feu',
'Iron Rose': 'Canon de pugnacité des poings de feu',
'Iron Splitter': 'Fracas des poings de feu',
'Judgment Blade': 'Lame du jugement',
'Labyrinthine Fate': 'Malédiction du seigneur du dédale',
'Leaping Spark': 'Éclairs en série',
'Left-Sided Shockwave/Right-Sided Shockwave': 'Onde de choc gauche/droite',
'Lethal Blow': 'Charge ultime',
'Lingering Miasma': 'Nuage miasmatique',
'Lots Cast': 'Bombe ensorcelée',
'Maelstrom\'s Bolt': 'Fulmination',
'Malediction of Agony': 'Malédiction lancinante',
'Malediction of Ruin': 'Malédiction dévastatrice',
'Mana Flame': 'Flammes de mana',
'Manifest Avatar': 'Clonage',
'Manipulate Miasma': 'Contrôle des miasmes',
'Memory of the Labyrinth': 'Appel du seigneur du dédale',
'Merciful Arc': 'Éventail rédempteur',
'Merciful Blooms': 'Kasha rédempteur',
'Merciful Breeze': 'Yukikaze rédempteur',
'Merciful Moon': 'Gekkô rédempteur',
'Mercy Fourfold': 'Quatuor de lames rédemptrices',
'Metamorphose': 'Nature changeante',
'Misty Wraith': 'spectre vaporeux',
'Northswain\'s Glow': 'Étoile du Nord',
'Optimal Offensive': 'Charge de maître d\'armes',
'Optimal Play': 'Technique de maître d\'armes',
'Pawn Off': 'Sabre tournoyant',
'Phantom Edge': 'Épées spectrales',
'Queen\'s Edict': 'Injonction de Gunnhildr',
'Queen\'s Justice': 'Châtiment royal',
'Queen\'s Shot': 'Tir tous azimuts',
'Queen\'s Will': 'Édit de Gunnhildr',
'Quick March': 'Ordre de marche',
'Rapid Bolts': 'Torrent d\'éclairs',
'Rapid Sever': 'Tranchage rapide',
'Reading': 'Analyse des faiblesses',
'Relentless Battery': 'Attaque coordonnée',
'Relentless Play': 'Ordre d\'attaque coordonnée',
'Rending Bolt': 'Pluie de foudre',
'Reverberating Roar': 'Cri disloquant',
'Reversal Of Forces': 'Inversion des masses',
'Right-Sided Shockwave/Left-Sided Shockwave': 'Onde de choc droite/gauche',
'Ruins Golem': 'golem des ruines',
'Sanguine Clot': 'caillot terrifiant',
'Seasons Of Mercy': 'Setsugekka rédempteur',
'Second Mercy': 'Deuxième lame rédemptrice',
'Secrets Revealed': 'Corporification',
'Shield Omen/Sword Omen': 'Posture du bouclier/épée',
'Shimmering Shot': 'Flèches scintillantes',
'Shot In The Dark': 'Tir à une main',
'Sniper Shot': 'Entre les yeux',
'Spiritual Sphere': 'sphère immatérielle',
'Spit Flame': 'Crachat enflammé',
'Spiteful Spirit': 'Sphère de brutalité',
'Strongpoint Defense': 'Défense absolue',
'Summon(?! Adds)': 'Invocation',
'Summon Adds': 'Ajouts d\'invocation',
'Sun\'s Ire': 'Ire ardente',
'Surge of Vigor': 'Zèle',
'Surging Flames': 'Déferlante de feu',
'Surging Flood': 'Déferlante d\'eau',
'Swirling Miasma': 'Anneau miasmatique',
'Sword Omen/Shield Omen': 'Posture de l\'épée/bouclier',
'The Ends': 'Croix lacérante',
'The Means': 'Croix perforante',
'Third Mercy': 'Troisième lame rédemptrice',
'Thunderous Discharge': 'Déflagration de foudre',
'Turret\'s Tour': 'Ricochets frénétiques',
'Undying Hatred': 'Psychokinèse',
'Unlucky Lot': 'Déflagration éthérée',
'Unrelenting Charge': 'Charge frénétique',
'Unseen Eye': 'Spectres de l\'ouragan de fleurs',
'Unwavering Apparition': 'Spectres du chevalier implacable',
'Verdant Path': 'École de la Voie verdoyante',
'Verdant Tempest': 'Tempête de la Voie verdoyante',
'Vicious Swipe': 'Vrille tranchante',
'Vile Wave': 'Vague de malveillance',
'Viscous Clot': 'caillot visqueux',
'Weave Miasma': 'Miasmologie',
'Weight Of Fortune': 'Pesanteur excessive',
'Whack': 'Tannée',
'Winds Of Fate': 'Tornade puissante',
'Winds Of Weight': 'Pesanteur et légèreté',
'Withering Curse': 'Malédiction de nanisme',
'Wrath Of Bozja': 'Courroux de Bozja',
},
},
{
'locale': 'ja',
'missingTranslations': true,
'replaceSync': {
'(?<!Crowned )Marchosias': 'マルコシアス',
'Aetherial Bolt': '魔弾',
'Aetherial Burst': '大魔弾',
'Aetherial Orb': '魔力塊',
'Aetherial Sphere': '魔気',
'Aetherial Ward': '魔法障壁',
'Automatic Turret': 'オートタレット',
'Avowed Avatar': 'アヴァウドの分体',
'Ball Lightning': '雷球',
'Ball Of Fire': '火炎球',
'Bicolor Golem': 'バイカラー・ゴーレム',
'Bozjan Phantom': 'ボズヤ・ファントム',
'Bozjan Soldier': 'ボズヤ・ソルジャー',
'Crowned Marchosias': 'アルファ・マルコシアス',
'Dahu': 'ダウー',
'Grim Reaper': 'グリムクリーバー',
'Gun Turret': 'ガンタレット',
'Immolating Flame': '大火焔',
'Pride of the Lion(?!ess)': '雄獅子の広間',
'Pride of the Lioness': '雌獅子の加護',
'Queen\'s Gunner': 'クイーンズ・ガンナー',
'Queen\'s Knight': 'クイーンズ・ナイト',
'Queen\'s Soldier': 'クイーンズ・ソルジャー',
'Queen\'s Warrior': 'クイーンズ・ウォリアー',
'Queensheart': '巫女たちの広間',
'Ruins Golem': 'ルーイン・ゴーレム',
'Sanguine Clot': 'オウガリッシュ・クロット',
'Seeker Avatar': 'シーカーの分体',
'Soldier Avatar': 'ソルジャーの分体',
'Spark Arrow': 'ファイアアロー',
'Spiritual Sphere': '霊気',
'Stuffy Wraith': 'スタフィー・レイス',
'Stygimoloch Lord': 'スティギモロク・ロード',
'Stygimoloch Monk': 'スティギモロク',
'Stygimoloch Warrior': 'スティギモロク・ウォリアー',
'Tempestuous Orb': '大氷球',
'The Hall of Hieromancy': '託宣所',
'The Hall of Supplication': '大祈祷所',
'The Path of Divine Clarity': '命の至聖所',
'The Queen': 'セイブ・ザ・クイーン',
'The Theater of One': '円形劇場跡',
'The Vault of Singing Crystal': '響き合う水晶の間',
'Trinity Avowed': 'トリニティ・アヴァウド',
'Trinity Seeker': 'トリニティ・シーカー',
'Viscous Clot': 'ヴィスカス・クロット',
'Why\\.\\.\\.won\'t\\.\\.\\.you\\.\\.\\.': 'グオオオォォ…… 敗レル……ナンテ……',
},
'replaceText': {
'(?<!C)Rush': '突進',
'(?<!Inescapable )Entrapment': '掛罠',
'--adds--': '--雑魚--',
'--chains--': '--鎖--',
'1111-Tonze Swing': '1111トンズ・スイング',
'Above Board': '浮遊波',
'Act Of Mercy': '破天鋭刃風',
'Allegiant Arsenal': 'ウェポンチェンジ',
'Aura Sphere': '闘気',
'Automatic Turret': 'オートタレット',
'Baleful Blade': '豪剣強襲撃',
'Baleful Comet': '豪剣焔襲撃',
'Baleful Firestorm': '豪剣魔炎旋',
'Baleful Onslaught': '豪剣激烈斬',
'Baleful Swathe': '豪剣黒流破',
'Beck And Call To Arms': '攻撃命令',
'Blade Of Entropy': '氷炎刃',
'Blood And Bone': '波動斬',
'Bloody Wraith': 'ブラッディ・レイス',
'Bombslinger': '爆弾投擲',
'Boost': 'ためる',
'Bozjan Soldier': 'ボズヤ・ソルジャー',
'Burn': '燃焼',
'Cleansing Slash': '乱命割殺斬',
'Coat Of Arms': '偏向防御',
'Coerce': '強要',
'Core Combustion': '心核熱',
'Crazed Rampage': 'キリキリ舞い',
'Creeping Miasma': '瘴気流',
'Crushing Hoof': '重圧殺',
'Dead Iron': '熱拳振動波',
'Death Scythe': 'デスサイズ',
'Devastating Bolt': '激雷',
'Devour': '捕食',
'Double Gambit': '幻影術',
'Elemental Brand': '氷炎の呪印',
'Elemental Impact': '着弾',
'Empyrean Iniquity': '天魔鬼神爆',
'Excruciation': '激痛',
'Falling Rock': '落石',
'Fateful Words': '呪いの言葉',
'Feral Howl': 'フェラルハウル',
'Fiery Portent': '熱気術',
'Firebreathe': 'ラーヴァブレス',
'First Mercy': '初手:鋭刃四刀の構え',
'Flailing Strike': '回転乱打',
'Flames Of Bozja': 'フレイム・オブ・ボズヤ',
'Flashvane': 'フラッシュアロー',
'Focused Tremor': '局所地震',
'Foe Splitter': 'マキ割り',
'Fool\'s Gambit': '幻惑術',
'Forceful Strike': '剛力の一撃',
'Fourth Mercy': '四手:鋭刃四刀の構え',
'Fracture': '炸裂',
'Freedom Of Bozja': 'リバティ・オブ・ボズヤ',
'Fury Of Bozja': 'フューリー・オブ・ボズヤ',
'Gleaming Arrow': 'グリッターアロー',
'Glory Of Bozja': 'グローリー・オブ・ボズヤ',
'Gods Save The Queen': 'ゴッド・セイブ・ザ・クイーン',
'Great Ball Of Fire': '火球',
'Gun Turret': 'ガンタレット',
'Gunnhildr\'s Blades': 'グンヒルドの剣',
'Head Down': 'ビーストチャージ',
'Heaven\'s Wrath': '聖光爆裂斬',
'Higher Power': '雷気充填',
'Hot And Cold': '氷炎乱流',
'Hot Charge': 'ホットチャージ',
'Hunter\'s Claw': 'ハンタークロウ',
'Hysteric Assault': 'ヒステリックアサルト',
'Ice Spikes': 'アイススパイク',
'Icy Portent': '冷気術',
'Inescapable Entrapment': '掛罠祭り',
'Infernal Slash': 'ヤーマスラッシュ',
'Invert Miasma': '反転瘴気操作',
'Iron Impact': '熱拳烈気砲',
'Iron Rose': '熱拳闘気砲',
'Iron Splitter': '熱拳地脈爆',
'Judgment Blade': '不動無明剣',
'Labyrinthine Fate': '迷宮王の呪い',
'Leaping Spark': '連雷',
'Left-Sided Shockwave': 'レフトサイド・ショックウェーブ',
'Lethal Blow': '必殺の一撃',
'Lingering Miasma': '瘴気雲',
'Lots Cast': '魔爆発',
'Maelstrom\'s Bolt': '天鼓雷音稲妻斬',
'Malediction of Agony': '苦悶の呪詛',
'Malediction of Ruin': '破滅の呪詛',
'Mana Flame': 'マナフレイム',
'Manifest Avatar': '分体生成',
'Manipulate Miasma': '瘴気操作',
'Memory of the Labyrinth': '迷宮王の大号令',
'Merciful Arc': '鋭刃舞踏扇',
'Merciful Blooms': '鋭刃花車',
'Merciful Breeze': '鋭刃雪風',
'Merciful Moon': '鋭刃月光',
'Mercy Fourfold': '鋭刃四刀流',
'Metamorphose': '性質変化',
'Misty Wraith': 'ミスティ・レイス',
'Northswain\'s Glow': '北斗骨砕斬',
'Optimal Offensive': '武装突撃',
'Optimal Play': '武装戦技',
'Pawn Off': '旋回刃',
'Phantom Edge': '霊幻剣',
'Queen\'s Edict': '女王の大勅令',
'Queen\'s Justice': '処罰令',
'Queen\'s Shot': '全方位射撃',
'Queen\'s Will': '女王の勅令',
'Quick March': '行軍命令',
'Rapid Bolts': '多重雷',
'Rapid Sever': '滅多斬り',
'Reading': '解析',
'Relentless Battery': '連携戦技',
'Relentless Play': '連携命令',
'Rending Bolt': '雷鳴落',
'Reverberating Roar': '崩落誘発',
'Reversal Of Forces': '質量転換',
'Right-Sided Shockwave': 'ライトサイド・ショックウェーブ',
'Ruins Golem': 'ルーイン・ゴーレム',
'Sanguine Clot': 'オウガリッシュ・クロット',
'Seasons Of Mercy': '鋭刃雪月花',
'Second Mercy': '二手:鋭刃四刀の構え',
'Secrets Revealed': '実体結像',
'Shield Omen': '盾の型',
'Shimmering Shot': 'トゥインクルアロー',
'Shot In The Dark': '片手撃ち',
'Sniper Shot': '狙撃',
'Spiritual Sphere': '霊気',
'Spit Flame': 'フレイムスピット',
'Spiteful Spirit': '闘気',
'Strongpoint Defense': '絶対防御',
'Summon Adds': '雑魚召喚',
'Summon(?! Adds)': '召喚',
'Sun\'s Ire': '焼討ち',
'Surge of Vigor': '奮発',
'Surging Flames': '火攻め',
'Surging Flood': '水攻め',
'Swirling Miasma': '瘴気輪',
'Sword Omen': '剣の型',
'The Ends': '十字斬',
'The Means': '十字撃',
'Third Mercy': '三手:鋭刃四刀の構え',
'Thunderous Discharge': '雷気発散',
'Turret\'s Tour': '跳弾乱舞',
'Undying Hatred': '超ねんりき',
'Unlucky Lot': '魔爆',
'Unrelenting Charge': '爆進',
'Unseen Eye': '花嵐の幻影',
'Unwavering Apparition': '羅刹の幻影',
'Verdant Path': '翠流派',
'Verdant Tempest': '翠流魔風塵',
'Vicious Swipe': 'キリ揉み',
'Vile Wave': '悪意の波動',
'Viscous Clot': 'ヴィスカス・クロット',
'Weave Miasma': '瘴気術',
'Weight Of Fortune': '過重力',
'Whack': '乱打',
'Winds Of Fate': '大烈風',
'Winds Of Weight': '過重烈風',
'Withering Curse': 'こびとの呪い',
'Wrath Of Bozja': 'ラース・オブ・ボズヤ',
},
},
{
'locale': 'cn',
'replaceSync': {
'(?<!Crowned )Marchosias': '马可西亚斯',
'Aetherial Bolt': '魔弹',
'Aetherial Burst': '大魔弹',
'Aetherial Orb': '魔力块',
'Aetherial Sphere': '魔气',
'Aetherial Ward': '魔法障壁',
'Automatic Turret': '自动炮塔',
'Avowed Avatar': '誓约之分身',
'Ball Lightning': '雷球',
'Ball Of Fire': '火球',
'Bicolor Golem': '双色巨像',
'Bozjan Phantom': '博兹雅幻灵',
'Bozjan Soldier': '博兹雅士兵',
'Crowned Marchosias': '首领马可西亚斯',
'Dahu': '大兀',
'Grim Reaper': '死亡收割者',
'Gun Turret': '射击炮台',
'Immolating Flame': '大火焰',
'Pride of the Lion(?!ess)': '雄狮大厅',
'Pride of the Lioness': '雌狮大厅',
'Queen\'s Gunner': '女王枪手',
'Queen\'s Knight': '女王骑士',
'Queen\'s Soldier': '女王士兵',
'Queen\'s Warrior': '女王战士',
'Queensheart': '巫女大厅',
'Ruins Golem': '毁灭巨像',
'Sanguine Clot': '血色凝块',
'Seeker Avatar': '求道之分身',
'Soldier Avatar': '士兵的分身',
'Spark Arrow': '火光箭',
'Spiritual Sphere': '灵气',
'Stuffy Wraith': '沉闷幽灵',
'Stygimoloch Lord': '冥河之王',
'Stygimoloch Monk': '冥河武僧',
'Stygimoloch Warrior': '冥河战士',
'Tempestuous Orb': '大冰球',
'The Hall of Hieromancy': '神谕所',
'The Hall of Supplication': '大祈祷所',
'The Path of Divine Clarity': '生命至圣所',
'The Queen': '天佑女王',
'The Theater of One': '圆形剧场遗迹',
'The Vault of Singing Crystal': '和鸣水晶之间',
'Trinity Avowed': '誓约之三位一体',
'Trinity Seeker': '求道之三位一体',
'Viscous Clot': '粘液凝块',
'Why\\.\\.\\.won\'t\\.\\.\\.you\\.\\.\\.': '呜哦哦哦哦…… 难道会……输掉吗……',
},
'replaceText': {
'(?<!C)Rush': '突进',
'(?<!Inescapable )Entrapment': '设置陷阱',
'--Spite Check--': '--斗气波--',
'--adds--': '--小怪--',
'--bleed--': '--出血--',
'--chains--': '--锁链--',
'--stunned--': '--眩晕--',
'--tethers--': '--连线--',
'--unstunned--': '--眩晕结束--',
'1111-Tonze Swing': '千百十一吨回转',
'Above Board': '浮游波',
'Act Of Mercy': '破天慈刃风',
'Allegiant Arsenal': '变换武器',
'Aura Sphere': '斗气',
'Automatic Turret': '自动炮塔',
'Baleful Blade': '豪剑强袭击',
'Baleful Comet': '豪剑焰袭击',
'Baleful Firestorm': '豪剑魔炎旋',
'Baleful Onslaught': '豪剑激烈斩',
'Baleful Swathe': '豪剑黑流破',
'Beck And Call To Arms': '攻击命令',
'Blade Of Entropy': '冰炎刃',
'Blood And Bone': '波动斩',
'Bloody Wraith': '血腥幽灵',
'Bombslinger': '投掷炸弹',
'Boost': '蓄力',
'Bozjan Soldier': '博兹雅士兵',
'Burn': '燃烧',
'Cleansing Slash': '乱命割杀斩',
'Coat Of Arms': '偏向防御',
'Coerce': '强迫',
'Core Combustion': '核心燃烧',
'Crazed Rampage': '狂暴乱舞',
'Creeping Miasma': '瘴气流',
'Crushing Hoof': '重压杀',
'Dead Iron': '热拳振动波',
'Death Scythe': '死镰',
'Devastating Bolt': '激雷',
'Devour': '捕食',
'Double Gambit': '幻影术',
'Elemental Arrow': '元素箭',
'Elemental Blast': '元素爆破',
'Elemental Brand': '冰炎咒印',
'Elemental Impact': '中弹',
'Empyrean Iniquity': '天魔鬼神爆',
'Excruciation': '剧痛',
'Falling Rock': '落石',
'Fateful Words': '诅咒的危言',
'Feral Howl': '野性嚎叫',
'Fiery Portent': '热浪术',
'Firebreathe': '岩浆吐息',
'First Mercy': '慈悲四刀第一念',
'Flailing Strike': '回转乱打',
'Flames Of Bozja': '博兹雅火焰',
'Flashvane': '闪光箭',
'Focused Tremor': '局部地震',
'Foe Splitter': '劈裂',
'Fool\'s Gambit': '幻惑术',
'Forceful Strike': '刚力一击',
'Fourth Mercy': '慈悲四刀第四念',
'Fracture': '炸裂',
'Freedom Of Bozja': '博兹雅之自由',
'Fury Of Bozja': '博兹雅之怒',
'Gleaming Arrow': '闪耀箭',
'Glory Of Bozja': '博兹雅之荣',
'Gods Save The Queen': '神佑女王',
'Great Ball Of Fire': '火球',
'Gun Turret': '射击炮台',
'Gunnhildr\'s Blades': '女王之刃',
'Head Down': '兽性冲击',
'Heaven\'s Wrath': '圣光爆裂斩',
'Higher Power': '雷气充填',
'Hot And Cold': '冰炎乱流',
'Hot Charge': '炽热冲锋',
'Hunter\'s Claw': '狩猎者之爪',
'Hysteric Assault': '癫狂突袭',
'Ice Spikes': '冰棘屏障',
'Icy Portent': '寒气术',
'Inescapable Entrapment': '陷阱狂欢',
'Infernal Slash': '地狱斩',
'Invert Miasma': '瘴气反转',
'Iron Impact': '热拳烈气炮',
'Iron Rose': '热拳斗气炮',
'Iron Splitter': '热拳地脉爆',
'Judgment Blade': '不动无明剑',
'Labyrinthine Fate': '迷宫王的诅咒',
'Leaping Spark': '连雷',
'Left-Sided Shockwave': '左侧震荡波',
'Lethal Blow': '必杀一击',
'Lingering Miasma': '瘴气云',
'Lots Cast': '魔爆炸',
'Maelstrom\'s Bolt': '天鼓雷音惊电斩',
'Malediction of Agony': '苦闷的诅咒',
'Malediction of Ruin': '破灭的诅咒',
'Mana Flame': '魔力之炎',
'Manifest Avatar': '生成分裂体',
'Manipulate Miasma': '操作瘴气',
'Memory of the Labyrinth': '迷宫王的大号令',
'Merciful Arc': '慈悲舞动扇',
'Merciful Blooms': '慈悲花车',
'Merciful Breeze': '慈悲雪风',
'Merciful Moon': '慈悲月光',
'Mercy Fourfold': '慈悲四刀流',
'Metamorphose': '变换属性',
'Misty Wraith': '迷雾幽灵',
'Northswain\'s Glow': '北斗骨碎斩',
'Optimal Offensive': '武装突击',
'Optimal Play': '武装战技',
'Pawn Off': '旋回刃',
'Phantom Edge': '灵幻剑',
'Queen\'s Edict': '女王的大敕令',
'Queen\'s Justice': '处罚令',
'Queen\'s Shot': '全方位射击',
'Queen\'s Will': '女王的敕令',
'Quick March': '行军指令',
'Rapid Bolts': '多重雷',
'Rapid Sever': '急促斩击',
'Reading': '解析',
'Relentless Battery': '协作战技',
'Relentless Play': '协作指令',
'Rending Bolt': '雷鸣落',
'Reverberating Roar': '引发崩塌',
'Reversal Of Forces': '质量转换',
'Right-Sided Shockwave': '右侧震荡波',
'Ruins Golem': '毁灭巨像',
'Sanguine Clot': '血色凝块',
'Seasons Of Mercy': '慈悲雪月花',
'Second Mercy': '慈悲四刀第二念',
'Secrets Revealed': '实体成像',
'Shield Omen': '盾型',
'Shimmering Shot': '闪烁箭',
'Shot In The Dark': '单手射击',
'Sniper Shot': '狙击',
'Spiritual Sphere': '灵气',
'Spit Flame': '火涎',
'Spiteful Spirit': '斗气',
'Strongpoint Defense': '绝对防御',
'Summon Adds': '召唤小怪',
'Summon(?! Adds)': '召唤',
'Sun\'s Ire': '太阳之怒',
'Surge of Vigor': '奋发',
'Surging Flames': '火攻',
'Surging Flood': '水攻',
'Swirling Miasma': '瘴气圈',
'Sword Omen': '剑型',
'The Ends': '十字斩',
'The Means': '十字击',
'Third Mercy': '慈悲四刀第三念',
'Thunderous Discharge': '雷气散发',
'Turret\'s Tour': '跳弹乱舞',
'Undying Hatred': '超念力',
'Unlucky Lot': '魔爆',
'Unrelenting Charge': '高速冲锋',
'Unseen Eye': '风花舞的幻影',
'Unwavering Apparition': '罗刹的幻影',
'Verdant Path': '翠青流',
'Verdant Tempest': '翠青魔风尘',
'Vicious Swipe': '狂暴回转',
'Vile Wave': '恶意的波动',
'Viscous Clot': '粘液凝块',
'Weave Miasma': '瘴气术',
'Weight Of Fortune': '过重力',
'Whack': '乱打',
'Winds Of Fate': '大烈风',
'Winds Of Weight': '过重烈风',
'Withering Curse': '小人诅咒',
'Wrath Of Bozja': '博兹雅之愤',
},
},
{
'locale': 'ko',
'replaceSync': {
'(?<!Crowned )Marchosias': '마르코시아스',
'Aetherial Bolt': '마탄',
'Aetherial Burst': '대마탄',
'Aetherial Orb': '마력 덩어리',
'Aetherial Sphere': '마기',
'Aetherial Ward': '마법 장벽',
'Automatic Turret': '자동포탑',
'Avowed Avatar': '맹세의 분열체',
'Ball Lightning': '뇌구',
'Ball Of Fire': '화염구',
'Bicolor Golem': '두 빛깔 골렘',
'Bozjan Phantom': '보즈야 유령',
'Bozjan Soldier': '보즈야 병사',
'Crowned Marchosias': '우두머리 마르코시아스',
'Dahu': '다후',
'Grim Reaper': '음산한 난도자',
'Gun Turret': '총포탑',
'Immolating Flame': '대화염',
'Pride of the Lion(?!ess)': '수사자의 방',
'Pride of the Lioness': '암사자의 방',
'Queen\'s Gunner': '여왕의 총사',
'Queen\'s Knight': '여왕의 기사',
'Queen\'s Soldier': '여왕의 병사',
'Queen\'s Warrior': '여왕의 전사',
'Queensheart': '무녀들의 방',
'Ruins Golem': '유적 골렘',
'Sanguine Clot': '핏빛 멍울',
'Seeker Avatar': '탐구의 분열체',
'Soldier Avatar': '병사 분열체',
'Spark Arrow': '불꽃 화살',
'Spiritual Sphere': '영기',
'Stuffy Wraith': '케케묵은 망령',
'Stygimoloch Lord': '스티키몰로크 군주',
'Stygimoloch Monk': '스티키몰로크',
'Stygimoloch Warrior': '스티기몰로크 전사',
'Tempestuous Orb': '거대 얼음 구체',
'The Hall of Hieromancy': '신탁소',
'The Hall of Supplication': '대기도소',
'The Path of Divine Clarity': '생명의 지성소',
'The Queen': '세이브 더 퀸',
'The Theater of One': '원형 극장 옛터',
'The Vault of Singing Crystal': '공명하는 수정의 방',
'Trinity Avowed': '맹세의 삼위일체',
'Trinity Seeker': '탐구의 삼위일체',
'Viscous Clot': '찐득한 멍울',
'Why\\.\\.\\.won\'t\\.\\.\\.you\\.\\.\\.': '그어어어어…… 내가…… 지다니……',
},
'replaceText': {
'(?<!C)Rush': '돌진',
'(?<!Inescapable )Entrapment': '함정 놓기',
'--Spite Check--': '--투기파--',
'--adds--': '--쫄--',
'--bleed--': '--고통--',
'--chains--': '--사슬--',
'--stunned--': '--기절--',
'--tethers--': '--선 연결--',
'--unstunned--': '--기절풀림--',
'1111-Tonze Swing': '1111톤즈 휘두르기',
'Above Board': '부유파',
'Act Of Mercy': '예리한 파천풍',
'Allegiant Arsenal': '무기 변경',
'Aura Sphere': '투기',
'Automatic Turret': '자동포탑',
'Baleful Blade': '호검 강습 공격',
'Baleful Comet': '호검 화염 습격',
'Baleful Firestorm': '호검 마염선',
'Baleful Onslaught': '호검 격렬참',
'Baleful Swathe': '호검 흑류파',
'Beck And Call To Arms': '공격 명령',
'Blade Of Entropy': '얼음불 칼날',
'Blood And Bone': '파동참',
'Bloody Wraith': '핏빛 망령',
'Bombslinger': '폭탄 투척',
'Boost': '힘 모으기',
'Bozjan Soldier': '보즈야 병사',
'Burn': '연소',
'Cleansing Slash': '난명할살참',
'Coat Of Arms': '편향 방어',
'Coerce': '강요',
'Core Combustion': '심핵열',
'Crazed Rampage': '나사 튕기기',
'Creeping Miasma': '독기 흐름',
'Crushing Hoof': '육중한 압살',
'Dead Iron': '불주먹 진동파',
'Death Scythe': '죽음의 낫',
'Devastating Bolt': '격뢰',
'Devour': '포식',
'Double Gambit': '환영술',
'Elemental Arrow': '속성 화살',
'Elemental Blast': '속성 운석 폭발',
'Elemental Brand': '얼음불 저주',
'Elemental Impact': '착탄',
'Empyrean Iniquity': '천마귀신폭',
'Excruciation': '격통',
'Falling Rock': '낙석',
'Fateful Words': '저주의 말',
'Feral Howl': '야성의 포효',
'Fiery Portent': '열기술',
'Firebreathe': '용암숨',
'First Mercy': '예리한 첫 번째 검',
'Flailing Strike': '회전 난타',
'Flames Of Bozja': '보즈야 플레임',
'Flashvane': '섬광 화살',
'Focused Tremor': '국소 지진',
'Foe Splitter': '장작 패기',
'Fool\'s Gambit': '환혹술',
'Forceful Strike': '강력한 일격',
'Fourth Mercy': '예리한 네 번째 검',
'Fracture': '작렬',
'Freedom Of Bozja': '보즈야의 자유',
'Fury Of Bozja': '보즈야의 분노',
'Gleaming Arrow': '현란한 화살',
'Glory Of Bozja': '보즈야의 영광',
'Gods Save The Queen': '갓 세이브 더 퀸',
'Great Ball Of Fire': '불덩이',
'Gun Turret': '총포탑',
'Gunnhildr\'s Blades': '군힐드의 검',
'Head Down': '야수 돌격',
'Heaven\'s Wrath': '성광폭렬참',
'Higher Power': '화력 보강',
'Hot And Cold': '빙염난류',
'Hot Charge': '맹렬한 돌진',
'Hunter\'s Claw': '사냥꾼의 발톱',
'Hysteric Assault': '발작 습격',
'Ice Spikes': '얼음 보호막',
'Icy Portent': '냉기술',
'Inescapable Entrapment': '함정 대잔치',
'Infernal Slash': '연옥 베기',
'Invert Miasma': '반전 독기 조작',
'Iron Impact': '불주먹 열기포',
'Iron Rose': '불주먹 투기포',
'Iron Splitter': '불주먹 지맥 폭발',
'Judgment Blade': '부동무명검',
'Labyrinthine Fate': '미궁왕의 저주',
'Leaping Spark': '연속 번개',
'Left-Sided Shockwave': '왼쪽 충격파',
'Lethal Blow': '필살의 일격',
'Lingering Miasma': '독기 구름',
'Lots Cast': '마폭발',
'Maelstrom\'s Bolt': '천고뇌음 번개 베기',
'Malediction of Agony': '고통의 저주',
'Malediction of Ruin': '파멸의 저주',
'Mana Flame': '마나 불꽃',
'Manifest Avatar': '분열체 생성',
'Manipulate Miasma': '독기 조작',
'Memory of the Labyrinth': '미궁왕의 대호령',
'Merciful Arc': '예리한 부채검',
'Merciful Blooms': '예리한 화차',
'Merciful Breeze': '예리한 설풍',
'Merciful Moon': '예리한 월광',
'Mercy Fourfold': '예리한 사도류',
'Metamorphose': '성질 변화',
'Misty Wraith': '안개 망령',
'Northswain\'s Glow': '북두골쇄참',
'Optimal Offensive': '무장 돌격',
'Optimal Play': '무장 전술',
'Pawn Off': '선회인',
'Phantom Edge': '영환검',
'Queen\'s Edict': '여왕의 대칙령',
'Queen\'s Justice': '처벌령',
'Queen\'s Shot': '전방위 사격',
'Queen\'s Will': '여왕의 칙령',
'Quick March': '행군 명령',
'Rapid Bolts': '다중 번개',
'Rapid Sever': '마구 베기',
'Reading': '해석',
'Relentless Battery': '연계 전술',
'Relentless Play': '연계 명령',
'Rending Bolt': '번개 떨구기',
'Reverberating Roar': '낙석 유발',
'Reversal Of Forces': '질량 전환',
'Right-Sided Shockwave': '오른쪽 충격파',
'Ruins Golem': '유적 골렘',
'Sanguine Clot': '핏빛 멍울',
'Seasons Of Mercy': '예리한 설월화',
'Second Mercy': '예리한 두 번째 검',
'Secrets Revealed': '실체 이루기',
'Shield Omen': '방패 태세',
'Shimmering Shot': '반짝반짝 화살',
'Shot In The Dark': '한손 쏘기',
'Sniper Shot': '저격',
'Spiritual Sphere': '영기',
'Spit Flame': '화염 뱉기',
'Spiteful Spirit': '투기',
'Strongpoint Defense': '절대 방어',
'Summon Adds': '쫄 소환',
'Summon(?! Adds)': '소환',
'Sun\'s Ire': '태워 없애기',
'Surge of Vigor': '발분',
'Surging Flames': '불공격',
'Surging Flood': '물공격',
'Swirling Miasma': '독기 고리',
'Sword Omen': '검 태세',
'The Ends': '십자참',
'The Means': '십자격',
'Third Mercy': '예리한 세 번째 검',
'Thunderous Discharge': '번개 발산',
'Turret\'s Tour': '도탄난무',
'Undying Hatred': '초염력',
'Unlucky Lot': '마폭',
'Unrelenting Charge': '폭주 돌진',
'Unseen Eye': '꽃폭풍의 환영',
'Unwavering Apparition': '나찰의 환영',
'Verdant Path': '취일문 유파',
'Verdant Tempest': '취일문 마풍진',
'Vicious Swipe': '나사 돌리기',
'Vile Wave': '악의의 파동',
'Viscous Clot': '찐득한 멍울',
'Weave Miasma': '독기술',
'Weight Of Fortune': '무거운 무게',
'Whack': '난타',
'Winds Of Fate': '대열풍',
'Winds Of Weight': '무거운 바람',
'Withering Curse': '작아지는 저주',
'Wrath Of Bozja': '보즈야의 격노',
},
},
],
};
export default triggerSet;
import * as d3 from "d3";
import { assert } from "chai";
import * as Plottable from "../../src";
import * as TestMethods from "../testMethods";
describe("Dispatchers", () => {
describe("Mouse Dispatcher", () => {
describe("Basic usage", () => {
  let div: d3.Selection<HTMLDivElement, any, any, any>;
  let component: Plottable.Component;

  // Fresh component anchored to a fresh <div> before every test.
  beforeEach(() => {
    component = new Plottable.Component();
    div = TestMethods.generateDiv();
    component.anchor(div);
  });

  it("creates only one Dispatcher.Mouse per <div> using getDispatcher() ", () => {
    const dispatcher1 = Plottable.Dispatchers.Mouse.getDispatcher(component);
    // BUGFIX: message previously said "on an SVG", but this suite anchors the
    // component to a <div> (see generateDiv above).
    assert.isNotNull(dispatcher1, "created a new Dispatcher on a <div>");
    const dispatcher2 = Plottable.Dispatchers.Mouse.getDispatcher(component);
    assert.strictEqual(dispatcher1, dispatcher2, "returned the existing Dispatcher if called again with same <div>");
    div.remove();
  });

  it("returns non-null value for default lastMousePosition()", () => {
    const mouseDispatcher = Plottable.Dispatchers.Mouse.getDispatcher(component);
    // Even before any real mouse event, the dispatcher must report a point.
    const point = mouseDispatcher.lastMousePosition();
    assert.isNotNull(point, "returns a value after initialization");
    assert.isNotNull(point.x, "x value is set");
    assert.isNotNull(point.y, "y value is set");
    div.remove();
  });
});
describe("Callbacks", () => {
// Fixed coordinates used for every synthetic mouse event in this suite, and
// the point each callback is expected to receive back from the dispatcher.
const targetX = 17;
const targetY = 76;
const expectedPoint = {
  x: targetX,
  y: targetY,
};
// Shared fixtures, rebuilt before each test: a component laid out inside a
// 400x400 <div>, and the Mouse dispatcher attached to that component.
let component: Plottable.Component;
let div: d3.Selection<HTMLDivElement, any, any, any>;
let mouseDispatcher: Plottable.Dispatchers.Mouse;
beforeEach(() => {
  const DIV_WIDTH = 400;
  const DIV_HEIGHT = 400;
  component = new Plottable.Component();
  div = TestMethods.generateDiv(DIV_WIDTH, DIV_HEIGHT);
  component.anchor(div);
  // computeLayout is needed so the component has a size for hit-testing.
  component.computeLayout();
  mouseDispatcher = Plottable.Dispatchers.Mouse.getDispatcher(component);
});
it("calls the mouseDown callback", () => {
  let wasInvoked = false;
  // Verifies both the reported point and the raw event object.
  function verify(point: Plottable.Point, event: MouseEvent) {
    wasInvoked = true;
    TestMethods.assertPointsClose(point, expectedPoint, 0.5, "mouse position is correct");
    assert.isNotNull(event, "mouse event was passed to the callback");
    assert.instanceOf(event, MouseEvent, "the event passed is an instance of MouseEvent");
  }

  assert.strictEqual(mouseDispatcher.onMouseDown(verify), mouseDispatcher,
    "setting the mouseDown callback returns the dispatcher");
  TestMethods.triggerFakeMouseEvent("mousedown", div, targetX, targetY);
  assert.isTrue(wasInvoked, "callback was called on mouseDown");

  assert.strictEqual(mouseDispatcher.offMouseDown(verify), mouseDispatcher,
    "unsetting the mouseDown callback returns the dispatcher");
  wasInvoked = false;
  TestMethods.triggerFakeMouseEvent("mousedown", div, targetX, targetY);
  assert.isFalse(wasInvoked, "callback was disconnected from the dispatcher");
  div.remove();
});
it("calls the mouseUp callback", () => {
  let wasInvoked = false;
  // Verifies both the reported point and the raw event object.
  function verify(point: Plottable.Point, event: MouseEvent) {
    wasInvoked = true;
    TestMethods.assertPointsClose(point, expectedPoint, 0.5, "mouse position is correct");
    assert.isNotNull(event, "mouse event was passed to the callback");
    assert.instanceOf(event, MouseEvent, "the event passed is an instance of MouseEvent");
  }

  assert.strictEqual(mouseDispatcher.onMouseUp(verify), mouseDispatcher,
    "setting the mouseUp callback returns the dispatcher");
  TestMethods.triggerFakeMouseEvent("mouseup", div, targetX, targetY);
  assert.isTrue(wasInvoked, "callback was called on mouseUp");

  assert.strictEqual(mouseDispatcher.offMouseUp(verify), mouseDispatcher,
    "unsetting the mouseUp callback returns the dispatcher");
  wasInvoked = false;
  TestMethods.triggerFakeMouseEvent("mouseup", div, targetX, targetY);
  assert.isFalse(wasInvoked, "callback was disconnected from the dispatcher");
  div.remove();
});
it("calls the wheel callback", () => {
  // HACKHACK PhantomJS doesn't implement fake creation of WheelEvents
  // https://github.com/ariya/phantomjs/issues/11289
  if (window.PHANTOMJS) {
    div.remove();
    return;
  }
  const targetDeltaY = 10;
  let wasInvoked = false;
  // Checks deltaY in addition to the usual point/event assertions.
  function verify(point: Plottable.Point, event: WheelEvent) {
    wasInvoked = true;
    assert.strictEqual(event.deltaY, targetDeltaY, "deltaY value was passed to callback");
    TestMethods.assertPointsClose(point, expectedPoint, 0.5, "mouse position is correct");
    assert.isNotNull(event, "mouse event was passed to the callback");
    assert.instanceOf(event, MouseEvent, "the event passed is an instance of MouseEvent");
  }

  assert.strictEqual(mouseDispatcher.onWheel(verify), mouseDispatcher,
    "setting the wheel callback returns the dispatcher");
  TestMethods.triggerFakeWheelEvent("wheel", div, targetX, targetY, targetDeltaY);
  assert.isTrue(wasInvoked, "callback was called on wheel");

  assert.strictEqual(mouseDispatcher.offWheel(verify), mouseDispatcher,
    "unsetting the wheel callback returns the dispatcher");
  wasInvoked = false;
  TestMethods.triggerFakeWheelEvent("wheel", div, targetX, targetY, targetDeltaY);
  assert.isFalse(wasInvoked, "callback was disconnected from the dispatcher");
  div.remove();
});
it("calls the dblClick callback", () => {
  let wasInvoked = false;
  // Verifies both the reported point and the raw event object.
  function verify(point: Plottable.Point, event: MouseEvent) {
    wasInvoked = true;
    TestMethods.assertPointsClose(point, expectedPoint, 0.5, "mouse position is correct");
    assert.isNotNull(event, "mouse event was passed to the callback");
    assert.instanceOf(event, MouseEvent, "the event passed is an instance of MouseEvent");
  }

  assert.strictEqual(mouseDispatcher.onDblClick(verify), mouseDispatcher,
    "setting the dblClick callback returns the dispatcher");
  TestMethods.triggerFakeMouseEvent("dblclick", div, targetX, targetY);
  assert.isTrue(wasInvoked, "callback was called on dblClick");

  assert.strictEqual(mouseDispatcher.offDblClick(verify), mouseDispatcher,
    "unsetting the dblClick callback returns the dispatcher");
  wasInvoked = false;
  TestMethods.triggerFakeMouseEvent("dblclick", div, targetX, targetY);
  assert.isFalse(wasInvoked, "callback was disconnected from the dispatcher");
  div.remove();
});
it("calls mouseMove callback on mouseover, mousemove, and mouseout", () => {
  // All three event types are expected to be routed to onMouseMove.
  const eventNames = ["mouseover", "mousemove", "mouseout"];
  let wasInvoked = false;
  const onMove = (point: Plottable.Point, event: MouseEvent) => {
    wasInvoked = true;
    TestMethods.assertPointsClose(point, expectedPoint, 0.5, "mouse position is correct");
    assert.isNotNull(event, "mouse event was passed to the callback");
    assert.instanceOf(event, MouseEvent, "the event passed is an instance of MouseEvent");
  };

  mouseDispatcher.onMouseMove(onMove);
  for (const eventName of eventNames) {
    wasInvoked = false;
    TestMethods.triggerFakeMouseEvent(eventName, div, targetX, targetY);
    assert.isTrue(wasInvoked, `callback was called on ${eventName}`);
  }

  mouseDispatcher.offMouseMove(onMove);
  wasInvoked = false;
  for (const eventName of eventNames) {
    TestMethods.triggerFakeMouseEvent(eventName, div, targetX, targetY);
    assert.isFalse(wasInvoked, `disconnected dispatcher callback not called on ${eventName}`);
  }
  div.remove();
});
it("can register two callbacks for the same mouse dispatcher", () => {
  let firstSeen = false;
  let secondSeen = false;
  const first = () => firstSeen = true;
  const second = () => secondSeen = true;

  mouseDispatcher.onMouseMove(first);
  mouseDispatcher.onMouseMove(second);
  TestMethods.triggerFakeMouseEvent("mousemove", div, targetX, targetY);
  assert.isTrue(firstSeen, "callback 1 was called on mousemove");
  assert.isTrue(secondSeen, "callback 2 was called on mousemove");

  // Detaching only the first callback must leave the second one active.
  firstSeen = false;
  secondSeen = false;
  mouseDispatcher.offMouseMove(first);
  TestMethods.triggerFakeMouseEvent("mousemove", div, targetX, targetY);
  assert.isFalse(firstSeen, "callback was not called after blanking");
  assert.isTrue(secondSeen, "callback 2 was still called");
  mouseDispatcher.offMouseMove(second);
  div.remove();
});
it("doesn't call callbacks if not in the DOM", () => {
  let wasInvoked = false;
  const record = () => wasInvoked = true;

  mouseDispatcher.onMouseMove(record);
  TestMethods.triggerFakeMouseEvent("mousemove", div, targetX, targetY);
  assert.isTrue(wasInvoked, "callback was called on mousemove");

  // Once the <div> is detached, events dispatched at it must be ignored.
  div.remove();
  wasInvoked = false;
  TestMethods.triggerFakeMouseEvent("mousemove", div, targetX, targetY);
  assert.isFalse(wasInvoked, "callback was not called after <div> was removed from DOM");
  mouseDispatcher.offMouseMove(record);
});
it("doesn't call callbacks for clicks if obscured by overlay", () => {
  let callbackWasCalled = false;
  const callback = () => callbackWasCalled = true;
  mouseDispatcher.onMouseDown(callback);
  TestMethods.triggerFakeMouseEvent("mousedown", div, targetX, targetY);
  assert.isTrue(callbackWasCalled, "callback was called on mousedown");

  // Getting the absolute coordinates of the <div> in order to place the
  // overlay at the right location (summing offsets up the offsetParent chain).
  let element = <HTMLElement> <any> div.node();
  const topLeftCorner = { x: 0, y: 0 };
  while (element != null) {
    topLeftCorner.x += (element.offsetLeft || element.clientLeft || 0);
    topLeftCorner.y += (element.offsetTop || element.clientTop || 0);
    element = <HTMLElement> (element.offsetParent || element.parentNode);
  }

  const overlay = TestMethods.getElementParent().append("div").styles({
    height: "400px",
    width: "400px",
    // BUGFIX: this was `topLeftCorner: "absolute"` — not a CSS property.
    // Without `position: absolute`, the `top`/`left` offsets below are ignored
    // and the overlay never actually covers the component, letting the test
    // pass vacuously.
    position: "absolute",
    top: topLeftCorner.y + "px",
    left: topLeftCorner.x + "px",
  });

  callbackWasCalled = false;
  TestMethods.triggerFakeMouseEvent("mousedown", overlay, targetX, targetY);
  assert.isFalse(callbackWasCalled, "callback was not called on mousedown on overlay");
  mouseDispatcher.offMouseDown(callback);
  div.remove();
  overlay.remove();
});
});
});
});
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.